[med-svn] [python-pbcommand] 02/03: New upstream version 0.5.3

Sascha Steinbiss satta at debian.org
Sun Jan 15 09:57:56 UTC 2017


This is an automated email from the git hooks/post-receive script.

satta pushed a commit to branch master
in repository python-pbcommand.

commit 2869178de2fe744533dc3e4ac0def6a445441e5e
Author: Sascha Steinbiss <satta at debian.org>
Date:   Sun Jan 15 09:49:46 2017 +0000

    New upstream version 0.5.3
---
 Makefile                                           |  18 +-
 README.md                                          | 110 ++++-
 REQUIREMENTS.txt                                   |   4 +-
 REQUIREMENTS_TEST.txt                              |   9 +-
 circle.yml                                         |   2 +-
 docs/source/advanced_task_types.rst                | 128 ++++++
 docs/source/api.rst                                |  40 ++
 docs/source/commandline_interface.rst              |  95 ++--
 docs/source/conf.py                                |  11 +-
 docs/source/index.rst                              |   3 +
 docs/source/pbcommand.cli.rst                      |   8 +
 docs/source/pbcommand.engine.rst                   |   9 +
 docs/source/pbcommand.models.file_types.rst        |  11 +
 docs/source/pbcommand.models.rst                   |  11 +
 docs/source/pbcommand.models.rtc.rst               |  11 +
 docs/source/pbcommand.models.task_options.rst      |  11 +
 docs/source/pbcommand.models.tc.rst                |  11 +
 docs/source/pbcommand.models.view_rules.rst        |  11 +
 docs/source/pbcommand.pb_io.pipeline.rst           |   9 +
 docs/source/pbcommand.pb_io.tc.rst                 |   9 +
 docs/source/pbcommand.services.rst                 |   8 +
 docs/source/pbcommand.utils.rst                    |   9 +
 docs/source/report_model.rst                       | 192 ++++++++
 docs/source/services.rst                           |  43 +-
 pacbio-manifest.json                               |  11 +
 pbcommand/__init__.py                              |   2 +-
 pbcommand/cli/core.py                              |   6 +-
 pbcommand/cli/examples/dev_app.py                  |   0
 pbcommand/cli/examples/dev_gather_fasta_app.py     |   7 +-
 pbcommand/cli/examples/dev_mixed_app.py            |  30 +-
 pbcommand/cli/examples/dev_quick_hello_world.py    |  25 +-
 pbcommand/cli/examples/dev_scatter_fasta_app.py    |  17 +-
 pbcommand/cli/examples/dev_simple_app.py           |   0
 pbcommand/cli/quick.py                             |  81 +++-
 pbcommand/cli/utils.py                             |   0
 pbcommand/engine/runner.py                         |   7 +-
 pbcommand/interactive_resolver.py                  |  27 +-
 pbcommand/models/__init__.py                       |  12 +-
 pbcommand/models/common.py                         | 481 ++++++++++++++++++++-
 pbcommand/models/conditions.py                     |  32 ++
 pbcommand/models/parser.py                         | 232 +++++-----
 pbcommand/models/report.py                         | 460 ++++++++++++++++++--
 pbcommand/models/tool_contract.py                  |  75 +++-
 pbcommand/pb_io/__init__.py                        |   8 +-
 pbcommand/pb_io/common.py                          | 113 ++++-
 pbcommand/pb_io/conditions.py                      |  53 +++
 pbcommand/pb_io/report.py                          |  39 +-
 pbcommand/pb_io/tool_contract_io.py                |  55 ++-
 pbcommand/resolver.py                              |  40 +-
 pbcommand/schemas/__init__.py                      |  31 +-
 pbcommand/schemas/datastore.avsc                   |  89 ++++
 pbcommand/schemas/datastore_view_rules.avsc        |  56 +++
 pbcommand/schemas/pbreport.avsc                    |  56 ++-
 pbcommand/schemas/pipeline_presets.avsc            |  44 ++
 .../schemas/pipeline_template_view_rules.avsc      |  50 +++
 pbcommand/schemas/report_spec.avsc                 | 248 +++++++++++
 pbcommand/schemas/resolved_tool_contract.avsc      |  15 +-
 pbcommand/schemas/tool_contract.avsc               |  93 +++-
 pbcommand/services/cli.py                          | 205 ++++++++-
 pbcommand/services/models.py                       |  14 +-
 pbcommand/services/service_access_layer.py         | 102 ++++-
 pbcommand/services/utils.py                        |   6 +-
 pbcommand/testkit/base_utils.py                    |   0
 pbcommand/testkit/core.py                          |   2 +-
 pbcommand/utils.py                                 |  92 ++--
 pbcommand/validators.py                            |  53 ++-
 setup.py                                           |   1 -
 tests/base_utils.py                                |  19 +
 tests/data/dev_gather_fasta_app_tool_contract.json |  37 --
 .../example-conditions/reseq-conditions-01.json    |  24 +
 .../example-conditions/reseq-conditions-02.json    |  24 +
 .../example-reports/example_version_1_0_0.json     |  31 ++
 tests/data/example-reports/example_with_plot.json  |  24 +
 tests/data/example-reports/overview.json           |   3 +-
 tests/data/example-reports/test_report.json        |  40 ++
 tests/data/example-reports/test_report2.json       |  72 +++
 ...nd.tasks.dev_txt_custom_outs_tool_contract.json |  44 --
 ...bcommand.tasks.dev_txt_hello_tool_contract.json |  44 --
 .../pipeline-datastore-view-rules/rules_01.json    |  20 +
 .../pipeline-presets/example-pipeline-presets.json |  17 +
 tests/data/report-specs/report_spec.json           | 114 +++++
 .../dev_example_resolved_tool_contract.json        |   0
 .../dev_mixed_app_resolved_tool_contract.json      |  28 ++
 .../resolved_contract_01.json                      |   0
 .../resolved_tool_contract_dev_app.json            |   0
 .../dev_example_dev_txt_app_tool_contract.json     |  72 +--
 .../dev_example_tool_contract.json                 |  72 +--
 .../dev_gather_fasta_app_tool_contract.json        |  37 ++
 .../dev_mixed_app_tool_contract.json               | 224 ++++++++++
 .../dev_scatter_fasta_app_tool_contract.json       |  72 +--
 .../pbcommand.tasks.dev_app_tool_contract.json}    |  72 +--
 ...ommand.tasks.dev_fastq2fasta_tool_contract.json | 100 ++---
 ...mmand.tasks.dev_qhello_world_tool_contract.json |  74 ++--
 ...nd.tasks.dev_txt_custom_outs_tool_contract.json |  44 ++
 ...bcommand.tasks.dev_txt_hello_tool_contract.json |  44 ++
 .../dev_example_dev_txt_app_tool_contract.json     |  50 +++
 .../tool-contracts/dev_example_tool_contract.json  |  50 +++
 .../dev_gather_fasta_app_tool_contract.json        |  38 ++
 .../dev_mixed_app_tool_contract.json               | 102 +++++
 .../dev_scatter_fasta_app_tool_contract.json       |  50 +++
 tests/data/tool-contracts/makefile                 |   4 +
 .../pbcommand.tasks.dev_app_tool_contract.json     |  50 +++
 ...ommand.tasks.dev_fastq2fasta_tool_contract.json |  53 +++
 ...mmand.tasks.dev_qhello_world_tool_contract.json |  46 ++
 ...nd.tasks.dev_txt_custom_outs_tool_contract.json |  45 ++
 ...bcommand.tasks.dev_txt_hello_tool_contract.json |  45 ++
 tests/test_e2e_example_apps.py                     |  65 ++-
 tests/test_load_resolved_tool_contract.py          |   6 +-
 tests/test_models_common_pacbio_options.py         | 167 +++++++
 tests/test_models_report.py                        | 221 ++++++++--
 tests/test_models_report_attribute.py              |   7 -
 tests/test_parsers.py                              |  25 ++
 tests/test_pb_io_conditions.py                     |  48 ++
 tests/test_pb_io_report.py                         |   3 +
 tests/test_pb_io_tool_contract.py                  |  10 +-
 tests/test_pb_io_tool_contract_v1.py               |  36 ++
 tests/test_resolver.py                             |  69 ++-
 tests/test_schema_validation.py                    |  69 ++-
 tests/test_utils.py                                |  23 +-
 tox.ini                                            |   2 +-
 120 files changed, 5450 insertions(+), 929 deletions(-)

diff --git a/Makefile b/Makefile
index fc0100d..f618267 100644
--- a/Makefile
+++ b/Makefile
@@ -20,17 +20,19 @@ clean:
 	rm -rf docs/_build
 
 test:
-	tox
+	nosetests -s --verbose --with-xunit --logging-config log_nose.cfg tests/test_*.py
 
 doc:
 	cd docs && make html
 
 build-tool-contracts:
-	python -m pbcommand.cli.examples.dev_app --emit-tool-contract > ./tests/data/dev_example_tool_contract.json
-	python -m pbcommand.cli.examples.dev_txt_app --emit-tool-contract > ./tests/data/dev_example_dev_txt_app_tool_contract.json
-	python -m pbcommand.cli.examples.dev_gather_fasta_app --emit-tool-contract > ./tests/data/dev_gather_fasta_app_tool_contract.json
-	python -m pbcommand.cli.examples.dev_scatter_fasta_app --emit-tool-contract > ./tests/data/dev_scatter_fasta_app_tool_contract.json
-	python -m pbcommand.cli.examples.dev_quick_hello_world emit-tool-contracts -o ./tests/data
+	python -m pbcommand.cli.examples.dev_app --emit-tool-contract > ./tests/data/tool-contracts/pbcommand.tasks.dev_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_app --emit-tool-contract > ./tests/data/tool-contracts/dev_example_tool_contract.json
+	python -m pbcommand.cli.examples.dev_txt_app --emit-tool-contract > ./tests/data/tool-contracts/dev_example_dev_txt_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_mixed_app --emit-tool-contract > ./tests/data/tool-contracts/dev_mixed_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_gather_fasta_app --emit-tool-contract > ./tests/data/tool-contracts/dev_gather_fasta_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_scatter_fasta_app --emit-tool-contract > ./tests/data/tool-contracts/dev_scatter_fasta_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_quick_hello_world emit-tool-contracts -o ./tests/data/tool-contracts
 
 run-pep8:
 	find pbcommand -name "*.py" -exec pep8 --ignore=E501,E265,E731,E402,W292 {} \;
@@ -44,3 +46,7 @@ build-java-classes:
 extract-readme-snippets:
 	rm -rf readme-snippet-*.py
 	pandoc -t markdown README.md  | pandoc --filter ./extract-readme-snippets.py
+
+build-avro-schema-docs:
+	# this requires nodejs + https://github.com/ept/avrodoc
+	avrodoc pbcommand/schemas/*.avsc > index.html
diff --git a/README.md b/README.md
index 6da4870..ca352f7 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-pbcommand High Level Overview
-=============================
+## pbcommand High Level Overview
+
 
 co-owners:
 
@@ -13,8 +13,93 @@ PacBio Officially Supported Library. Note the APIs are still in flux and not yet
 
 [![Circle CI](https://circleci.com/gh/PacificBiosciences/pbcommand.svg?style=svg)](https://circleci.com/gh/PacificBiosciences/pbcommand)
 
+
+
+### Components
+
 PacBio library for common utils, models, and tools to interface with pbsmrtpipe workflow engine.
 
+1. Common Models and Schemas
+2. Service client layer to the SMRTLink services
+3. Tool Contract and Resolved Tool Contract interface for integrating with pbsmrtpipe and SMRT Link
+
+
+## PacBio Core Models and Schemas
+
+- [ToolContract](https://github.com/PacificBiosciences/pbcommand/blob/master/pbcommand/schemas/tool_contract.avsc) : Used in define task interfaces and used in pipeline definitions (i.e., Resolved Pipeline Templates)
+- [ResolvedToolContract](https://github.com/PacificBiosciences/pbcommand/blob/master/pbcommand/schemas/resolved_tool_contract.avsc): Used in pipeline running
+- [PacBio Report](https://github.com/PacificBiosciences/pbcommand/blob/master/pbcommand/schemas/pbreport.avsc) Core data model for plots, tables and metrics (i.e., attributes) displayed in SMRTLink or available as output from SMRT Link web services) [Examples](https://github.com/PacificBiosciences/pbcommand/tree/master/tests/data/example-reports)
+- [PacBio DataStore](https://github.com/PacificBiosciences/pbcommand/blob/master/pbcommand/schemas/datastore.avsc): JSON file of datastore file(s) that are emitted from an analysis, import-dataset or other job type. 
+- [DataStore View Rules](https://github.com/PacificBiosciences/pbcommand/blob/master/pbcommand/schemas/datastore_view_rules.avsc): Custom views of datastore files in SMRTLink
+- [TODO] Pipeline Template View Rules: Custom hiding and renaming of pipeline task options for SMRTLink UI) [Examples](https://github.com/PacificBiosciences/smrtflow/tree/master/smrt-server-analysis/src/main/resources/pipeline-template-view-rules)
+- [TODO] Resolved Pipeline Template: Custom description of pipeline, bindings and task options for SMRTLink UI [Examples](https://github.com/PacificBiosciences/smrtflow/blob/master/smrt-server-analysis/src/main/resources/resolved-pipeline-templates/pbsmrtpipe.pipelines.dev_diagnostic_pipeline_template.json)
+- [TODO] Report View Rules [Examples](https://github.com/PacificBiosciences/smrtflow/tree/master/smrt-server-analysis/src/main/resources/report-view-rules) 
+
+An HTML view of these models can be generated by using [AvroDoc](https://github.com/ept/avrodoc)
+
+The Avro schema format can be converted to JSON Schema using [Avro To Json Schema](https://json-schema-validator.herokuapp.com)
+
+
+## Service Client Layer to SMRT Link
+
+pbcommand provides a high level interface to the SMRT Link services. See `pbcommand.services` for more details.
+ 
+Here's a terse example of getting a Job by id and fetching the report metrics from the results.
+ 
+```
+IPython 5.1.0 -- An enhanced Interactive Python.
+?         -> Introduction and overview of IPython's features.
+%quickref -> Quick reference.
+help      -> Python's own help system.
+object?   -> Details about 'object', use 'object??' for extra details.
+
+In [1]: from pbcommand.services import ServiceAccessLayer
+
+In [2]: s = ServiceAccessLayer("smrtlink-beta", 8081)
+
+In [3]: j = s.get_analysis_job_by_id(22270)
+
+In [4]: j.id, j.name, j.path
+Out[4]: 
+(22270,
+ 'm54088_160819_000654_resequencing',
+ '/pbi/dept/secondary/siv/smrtlink/smrtlink-beta/smrtsuite_166987/userdata/jobs_root/022/022270')
+
+In [5]: j
+Out[5]: ServiceJob(id=22270, uuid=u'4443d0b6-899c-40b9-98f2-2e2b4f889b53', name='m54088_160819_000654_resequencing', state='SUCCESSFUL', path='/pbi/dept/secondary/siv/smrtlink/smrtlink-beta/smrtsuite_166987/userdata/jobs_root/022/022270', job_type='pbsmrtpipe', created_at=datetime.datetime(2016, 8, 20, 1, 25, 50, 874000, tzinfo=<iso8601.Utc>), settings={u'workflowOptions': [], u'entryPoints': [{u'fileTypeId': u'PacBio.DataSet.ReferenceSet', u'entryId': u'eid_ref_dataset', u'datasetId': 9 [...]
+
+In [6]: report_metrics = s.get_analysis_job_report_attrs(22270)
+
+In [7]: report_metrics
+Out[7]: 
+{u'coverage.depth_coverage_mean': 11251.832147121406,
+ u'coverage.missing_bases_pct': 0.004559547692868867,
+ u'mapping_stats.mapped_readlength_max': 15478,
+ u'mapping_stats.mapped_readlength_mean': 3899,
+ u'mapping_stats.mapped_readlength_n50': 5844,
+ u'mapping_stats.mapped_readlength_q95': 9560,
+ u'mapping_stats.mapped_reads_n': 207986,
+ u'mapping_stats.mapped_subread_bases_n': 810462596,
+ u'mapping_stats.mapped_subread_concordance_mean': 0.8354,
+ u'mapping_stats.mapped_subread_readlength_max': 12846.0,
+ u'mapping_stats.mapped_subread_readlength_mean': 3823,
+ u'mapping_stats.mapped_subreadlength_n50': 5799,
+ u'mapping_stats.mapped_subreadlength_q95': 9530,
+ u'mapping_stats.mapped_subreads_n': 212005,
+ u'variants.longest_contig_name': u'11k_pbell_H1-6_ScaI_circular_3x_l65796',
+ u'variants.mean_contig_length': 65796.0,
+ u'variants.weighted_mean_bases_called': 0.9999544045230713,
+ u'variants.weighted_mean_concordance': 0.9999696016293528,
+ u'variants.weighted_mean_coverage': 11251.832147121406}
+
+In [8]: 
+ 
+``` 
+
+
+
+## Tool Contract and Resolved Tool Contracts
+
 To integrate with the pbsmrtpipe workflow engine you must be able to generate a **Tool Contract** and to be able to run from a **Resolved Tool Contract**.
 
 A **Tool Contract** contains the metadata of the exe, such as the file types of inputs, outputs and options.
@@ -76,19 +161,20 @@ $> python -m pbcommand.cli.examples.dev_quick_hello_world -o /path/to/my-tool-co
 ```
 
 
-Creating a Full Commandline Tool with TC/RTC and argparse support
------------------------------------------------------------------
+## Creating a Full Commandline Tool with TC/RTC and argparse support
+
 
 Three Steps
-- define Parser
+- define Parser using `get_pbparser`
 - add running from argparse and running from Resolved ToolContract funcs to call your main
 - add call to driver
 
-Import or define your main function.
+Import or define your main function from your library.
 
 ```python
+
 def run_my_main(fasta_in, fasta_out, min_length):
-    # do stuff. Main should return an int exit code
+    # do work. Main should return an int exit code and be completely independent of argparse
     return 0
 ```
 
@@ -135,11 +221,11 @@ def get_contract_parser():
 ```
         
 
-Define a Wrapping layer to call your main from both the tool contract and raw argparse IO layer
+Define a Wrapping IO layer to call your main function from both the tool contract and raw argparse IO layer
 
 ```python
 def _args_runner(args):
-    # this is the args from parser.parse_args()
+    # this is the args from parser.parse_args() using the python stdlib argparse model
     # the properties of args are defined as "labels" in the add_args_and_options func.
     return run_my_main(args.fasta_in, args.fasta_out, args.read_length)
 
@@ -154,8 +240,6 @@ def _resolved_tool_contract_runner(resolved_tool_contract):
 ```
     
     
-    
-    
 Add running layer
 
 ```python
@@ -190,7 +274,7 @@ Now you can run your tool via the argparse standard interface as well as emittin
 And you can run the tool from a **Resolved Tool Contract**
 
 ```sh
-> python -m pbcommand.cli.example.dev_app --resolved-tool-contract /path/to/resolved_contract.json
+> python -m pbcommand.cli.examples.dev_app --resolved-tool-contract /path/to/resolved_contract.json
 ```
 
 See the dev apps in ["pbcommand.cli.examples"](https://github.com/PacificBiosciences/pbcommand/blob/master/pbcommand/cli/examples/dev_app.py) for a complete application (They require pbcore to be installed).
@@ -198,7 +282,7 @@ See the dev apps in ["pbcommand.cli.examples"](https://github.com/PacificBioscie
 In addition to TC/RTC support, there's a complete argparse support for the task options. An example of **help** is shown below.
 
 ```sh
-(pbcommand_test)pbcommand $> python -m 'pbcommand.cli.examples.dev_app' --help
+$> python -m pbcommand.cli.examples.dev_app --help
 usage: dev_app.py [-h] [-v] [--versions] [--emit-tool-contract]
                   [--resolved-tool-contract RESOLVED_TOOL_CONTRACT]
                   [--log-level LOG_LEVEL] [--debug]
diff --git a/REQUIREMENTS.txt b/REQUIREMENTS.txt
index 5368e16..b52ac5b 100644
--- a/REQUIREMENTS.txt
+++ b/REQUIREMENTS.txt
@@ -1,5 +1,3 @@
-xmlbuilder
-jsonschema
 avro
 requests
-iso8601
\ No newline at end of file
+iso8601
diff --git a/REQUIREMENTS_TEST.txt b/REQUIREMENTS_TEST.txt
index 55da931..a9b1e63 100644
--- a/REQUIREMENTS_TEST.txt
+++ b/REQUIREMENTS_TEST.txt
@@ -1,11 +1,12 @@
 nose
 tox
-numpy # this is required for pbreports serialization
 # Putting these here for RTD
 sphinx-argparse
 sphinx-bootstrap-theme
-xmlbuilder
-jsonschema
 avro
 requests
-iso8601
\ No newline at end of file
+iso8601
+# For sphinx extension
+ipython
+# ipython requires this?
+matplotlib
diff --git a/circle.yml b/circle.yml
index 80d0fc2..4d648bb 100644
--- a/circle.yml
+++ b/circle.yml
@@ -11,4 +11,4 @@ dependencies:
 test:
   override:
     - mkdir -p $CIRCLE_TEST_REPORTS
-    - nosetests -s --verbose --with-xunit --xunit-file=$CIRCLE_TEST_REPORTS/nosetests.xml --logging-config log_nose.cfg tests/test_*.py
\ No newline at end of file
+    - make test
diff --git a/docs/source/advanced_task_types.rst b/docs/source/advanced_task_types.rst
new file mode 100644
index 0000000..6f53771
--- /dev/null
+++ b/docs/source/advanced_task_types.rst
@@ -0,0 +1,128 @@
+Advanced Task/ToolContract Types
+================================
+
+
+To enable pipeline scaling and "chunking" of files, two new Tool Contract types extend the base Tool Contract data model.
+
+
+
+Scattering/Chunking Tool Contract
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+Tasks/ToolContracts that take any file type(s) and emit a **single** scatter.chunk.json file.
+
+
+At a high level, the Scatter Tool Contract data model extends the core Tool Contract model and adds two fields, `chunk_keys` and `nchunks`.
+
+- `chunk_keys` is the expected key(s) that will be written to the `PipelineChunk` data model (defined below)
+- `nchunks` mirrors the `nproc` model of using a symbol `$max_nchunks` or an int to define the absolute upper bound on the number of chunks that should be created. If this value is exceeded, the pipeline engine will immediately fail the execution.
+
+
+Example Tool Contract
+
+.. literalinclude:: ../../tests/data/tool-contracts/dev_scatter_fasta_app_tool_contract.json
+    :language: javascript
+
+
+
+PipelineChunk Data Model
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+The `PipelineChunk` data model is defined in `pbcommand.models`, and the companion IO layers (`load_pipeline_chunks_from_json` and `write_pipeline_chunks`) are in `pbcommand.pb_io`.
+
+Each input file **must** be mapped to a `chunk_key` that can then be mapped to the input of the original `unchunked` task.
+
+For example, if there's a single input file (e.g., FileTypes.FASTA), then the Scatter ToolContract should define a `chunk_key` of "fasta_id". `chunk_key`(s) that do NOT start with `$chunk.` will be considered extra metadata that will be passed through. This is useful for adding chunk-specific metadata, such as the number of contigs or average contig length.
+
+Minimal example of reading and writing `PipelineChunk(s)` data model.
+
+.. ipython::
+
+
+    In [1]: from pbcommand.models import PipelineChunk
+
+
+    In [5]: c0 = PipelineChunk("scattered-fasta_0", **{"$chunk.fasta_id":"/path/to/chunk-0.fasta"})
+
+    In [6]: c1 = PipelineChunk("scattered-fasta_1", **{"$chunk.fasta_id":"/path/to/chunk-1.fasta"})
+
+    In [7]: chunks = [c0, c1]
+
+    In [8]: from pbcommand.pb_io import write_pipeline_chunks
+
+    In [10]: write_pipeline_chunks(chunks, "test-scatter.chunk.json", "Test comment")
+
+    In [11]: from pbcommand.pb_io import load_pipeline_chunks_from_json
+
+    In [12]: load_pipeline_chunks_from_json("test-scatter.chunk.json")
+    Out[12]:
+    [<PipelineChunk id='scattered-fasta_0' chunk keys=$chunk.fasta_id >,
+     <PipelineChunk id='scattered-fasta_1' chunk keys=$chunk.fasta_id >]
+
+
+
+
+
+Defining a Scatter Tool Contract
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Currently, python is the only language that is supported for writing CHUNK JSON files.
+
+The python Scatter tool contract API follows similar to base Tool Contract API,
+
+
+Simple example of Scattering/Chunking a single Fasta file. The notable points are adding the required `chunk_keys` and `nchunks` to the scattering specific pbparser.
+
+
+.. literalinclude:: ../../pbcommand/cli/examples/dev_scatter_fasta_app.py
+    :language: python
+
+
+Advanced Scattering/Chunking Patterns
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For more advanced scattering/chunks usecases, such as chunking multiple input files (e.g., task input signature SubreadSet and ReferenceSet XML), this will require writing a chunk key for each input. Specifically, `$chunk.subreadset_id` and `$chunk.referenceset_id` to the `PipelineChunk`.
+
+This enables the chunking pattern of a specific task to be completely decoupled from the workflow level. The chunking pattern is communicated in the chunk(s) in `PipelineChunk` defined by the chunking task. In this specific chunking pattern, the SubreadSet is chunked into N files, while the ReferenceSet is passed unchunked.
+
+These chunk keys combined with the chunk operator (defined in pbsmrtpipe_) communicates to the workflow engine how to pass `$chunk.subreadset_id` to the first input of N-chunked instances of unchunked task. Similarly, the `$chunk.referenceset_id` to the second input of the N-chunked task instance.
+
+
+See the pbsmrtpipe_ docs and the testkit-data jobs in pbsmrtpipe for more details.
+
+
+Gather ToolContract
+~~~~~~~~~~~~~~~~~~~
+
+
+A Gather Tool Contract takes a **single** CHUNK Json file type as input and emits a **single** output file of any type.
+
+
+
+Example:
+
+.. literalinclude:: ../../tests/data/tool-contracts/dev_gather_fasta_app_tool_contract.json
+    :language: javascript
+
+
+The Gather task doesn't extend the base ToolContract or add new properties. However, it will restrict the input type to `FileTypes.CHUNK`, and the output type signature **must only be one file type**.
+
+
+Example Gather Tool:
+
+.. literalinclude:: ../../pbcommand/cli/examples/dev_gather_fasta_app.py
+    :language: python
+
+
+For Gather'ing a task that has a multiple N outputs, N gather tasks must be defined.
+
+See the pbsmrtpipe_ docs for details of constructing a chunked pipeline.
+
+
+More examples of scatter/chunking and gather tasks are in pbcoretools_.
+
+.. _pbsmrtpipe: http://pbsmrtpipe.readthedocs.io
+
+.. _pbcoretools: https://github.com/PacificBiosciences/pbcoretools/tree/master/pbcoretools/tasks
\ No newline at end of file
diff --git a/docs/source/api.rst b/docs/source/api.rst
new file mode 100644
index 0000000..6ab3134
--- /dev/null
+++ b/docs/source/api.rst
@@ -0,0 +1,40 @@
+pbcommand API docs
+##################
+
+The `pbcommand` package provides Python modules for common data models, building commandline tools (e.g., ToolContract, CLI parser interface) and interacting with the SMRT Link webservices.
+
+
+Library API documentation
+=========================
+
+    .. automodule:: pbcommand.pb_io
+        :members:
+
+    :doc:`pbcommand.utils`: Util functions
+
+    :doc:`pbcommand.models`: Core models
+
+    :doc:`pbcommand.models.file_types`: Registered File Types
+
+    :doc:`pbcommand.models.tc` Tool Contract data model
+
+    :doc:`pbcommand.models.rtc` Resolved Tool Contract data model
+
+    :doc:`pbcommand.engine`: Util functions for calling an external process
+
+    :doc:`pbcommand.cli`: Commandline interface
+
+    :doc:`pbcommand.pb_io.tc`: IO to load Tool Contract and Resolved Tool Contracts
+
+    :doc:`pbcommand.pb_io.pipeline`: IO to load Pipeline and Pipeline Chunk
+
+    :doc:`pbcommand.models.task_options`: PacBio Task Options models
+
+    :doc:`pbcommand.services`: Client API to SMRT Link WebServices
+
+    :doc:`pbcommand.models.view_rules`: View Rules used by SMRT Link WebServices
+
+
+
+
+
diff --git a/docs/source/commandline_interface.rst b/docs/source/commandline_interface.rst
index b36feaf..cfbeb20 100644
--- a/docs/source/commandline_interface.rst
+++ b/docs/source/commandline_interface.rst
@@ -12,9 +12,9 @@ Motivation And High Level Example
 Benefits
 ~~~~~~~~
 
-- A consistent concrete common interface for shelling out to executables
+- A consistent concrete common interface for shelling out to an executable
 - task options have a consistent model for validation
-- task versioning is supported
+- task version is supported
 - A principled model for wrapping tools. For example, pbalign would "inherit" blasr options and extend, or wrap them.
 - Once a manifest has been defined and registered to pbsmrtpipe, the task/manifest can be referenced in pipelines with no additional work
 
@@ -28,13 +28,13 @@ Terms
 - 'Resolved Tool Contract' is a single file that contains the resolved values in the manifest
 - 'Driver' is the general interface for calling a commandline exe. This can be called from the commandline or directly as an API call (via any language which supports the manifest interface).
 
-Hello World Example
-~~~~~~~~~~~~~~~~~~~
+Hello World Dev Example
+~~~~~~~~~~~~~~~~~~~~~~~
 
-Tool Contract file for 'my-exe'
+Tool Contract example for an exe, `python -m pbcommand.cli.examples.dev_app`, with tool contract id `pbcommand.tasks.dev_app`.
 
 
-.. literalinclude:: ../../tests/data/dev_example_tool_contract.json
+.. literalinclude:: ../../tests/data/tool-contracts/pbcommand.tasks.dev_app_tool_contract.json
     :language: javascript
 
 
@@ -56,7 +56,9 @@ Note. A single driver can reference many manifests. For example "pbreports" woul
 Programmatically defining a Parser to Emit a Tool Contract
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-pbcommand provides a API to create a tool contract and an argparse instance from a single interface. This facilitates a single point of defining options and keeps the standard commandline entry point and the tool contract to be in sync. This also allows your tool to emit the tool contract to stdout using "--emit-tool-contract" and to be run from a **Resolved Tool Contract** using the "--resolved-tool-contract /path/to/resolved-tool-contract.json" commandline argument.
+pbcommand provides an API to create a tool contract and an argparse instance from a single interface. This facilitates a single point of defining options and keeps the standard commandline entry point and the tool contract in sync.
+
+This also allows your tool to emit the tool contract to stdout using "--emit-tool-contract" **and** the tool to be run from a **Resolved Tool Contract** using the "--resolved-tool-contract /path/to/resolved-tool-contract.json" commandline argument, **while** also supporting the standard python commandline interface via argparse.
 
 Complete App shown below.
 
@@ -66,77 +68,50 @@ Complete App shown below.
 
 .. note:: Options must be prefixed with {pbcommand}.task_options.{option_id} format.
 
-Details of Resolved Tool Contract
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Details and Example of a Resolved Tool Contract
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+- Language agnostic JSON format to encode the resolved values
 - input, outputs file types are resolved to file paths
 - nproc and other resources are resolved
+- IO layers to convert between JSON and python using `load_resolved_tool_contract_from` in `pbcommand.pb_io`
 
+Example Resolved Tool Contract:
 
-.. literalinclude:: ../../tests/data/dev_example_resolved_tool_contract.json
+.. literalinclude:: ../../tests/data/resolved-tool-contracts/dev_example_resolved_tool_contract.json
     :language: javascript
 
 
-Library usage
-~~~~~~~~~~~~~
-
-(language API example)
-
-
-Example of using a manifest in an tool, such as mapping status report.
-
-.. code-block:: python
+Testing Tool Contracts
+~~~~~~~~~~~~~~~~~~~~~~
 
-    from pbcommand.pb_io import load_tool_contract_from
-    # your application was called via "pbreports resolved-manifest.json"
-    p = "/path/to/resolved-tool-contract.json"
-    # load resolved manifest from
-    rtc = load_tool_contract_from(p)
+There is a thin test framework in `pbcommand.testkit` to help test tool contracts from within nose.
 
-    # general call to mapping stats report main
-    # mapping_stats_main("/path/to/align.dataset.xml", "/path/to/reference.dataset.xml", "/path/to/output.json", my_option=1235)
-    exit_code = mapping_stats_main(rtc.input_files[0], rtc.input_files[1], rtc.output_files[0], rtc.opts["pbreports.task_options.my_option"])
+The `PbTestApp` base class will provide the core validation of the outputs as well as handled the creation of the resolved tool contract.
 
+Output Validation assertions
 
-Example to resolving the Tool Contract
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+- validates Output files exist
+- validates resolved task options
+- validates resolved value of is distributed
+- validates resolved value of nproc
 
-The resolver must have assigned values for max nproc, root temp dir,
-output dir. The output dir can be used to assign the output paths of
-the output files.
+Example:
 
-.. code-block:: python
-
-    # simple python example, the scala or C++ API would be similar
-
-    from pbcommand.pb_io import load_tool_contract_from
-    from pbcommand.cli import resolve_tool_contract
-
-    # load tool contract that is registered to your python package
-    tool_contract = load_tool_contract_from("/path/to/tool-contract.json")
-    tool_contract = ToolContractRegistry.get("pbsmrtpipe.tasks.dev_static_task")
-
-    max_nproc = 3
-    tmp_dir = "/tmp/my-tmp"
-    output_dir = os.getcwd()
+.. literalinclude:: ../../tests/test_e2e_example_apps.py
+    :language: python
 
-    input_files = ("/path/to/file.csv", "/path/to/dataset.subreads.xml")
-    options = {"pbsmrtipe.task_options.my_option": 1234}
 
-    # create instance of Resolved Tool Contract
-    rtc = resolve_tool_contract(tool_contract, input_files, output_dir, tmp_dir, max_nproc, options)
+Tips
+~~~~
 
-    # TODO. Not implemented yet
-    # The driver will run the tool, validate output files exist and
-    # cleanup any temp files/resources.
-    result = run_tool_contract_driver(rtc, cleanup=False)
+A dev tool within pbcommand can help convert Tool Contract JSON files to Resolved Tool Contract for testing purposes.
 
-    print result.exit_code
-    print result.error_message
-    print result.host_name
-    print result.run_time
 
-    # sugar to persist results
-    result.write_json("output-results.json")
+.. argparse::
+   :module: pbcommand.interactive_resolver
+   :func: get_parser
+   :prog: python -m pbcommand.interactive_resolver
 
 
+.. note::  This tool has a dependency on `prompt_toolkit`, which can be installed via pip.
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 57cb70b..f651708 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -16,6 +16,7 @@ import sys
 import os
 import shlex
 import sphinx_bootstrap_theme
+import pbcommand
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
@@ -35,6 +36,8 @@ extensions = [
     'sphinx.ext.doctest',
     'sphinx.ext.coverage',
     'sphinx.ext.viewcode',
+    'IPython.sphinxext.ipython_console_highlighting',
+    'IPython.sphinxext.ipython_directive'
 ]
 
 # For argparse
@@ -56,7 +59,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'pbcommand'
-copyright = u'2015, Michael Kocher'
+copyright = u'2015-2017, Michael Kocher'
 author = u'Michael Kocher'
 
 # The version info for the project you're documenting, acts as replacement for
@@ -64,9 +67,9 @@ author = u'Michael Kocher'
 # built documents.
 #
 # The short X.Y version.
-version = '0.1.0'
+version = pbcommand.get_version()
 # The full version, including alpha/beta/rc tags.
-release = '0.1.0'
+release = pbcommand.get_version()
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -294,3 +297,5 @@ texinfo_documents = [
 
 # If true, do not generate a @detailmenu in the "Top" node's menu.
 #texinfo_no_detailmenu = False
+
+autoclass_content = 'both'
diff --git a/docs/source/index.rst b/docs/source/index.rst
index f8de83e..0303655 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -12,7 +12,10 @@ Contents:
    :maxdepth: 2
 
    commandline_interface
+   advanced_task_types
    services
+   report_model
+   api
 
 
 Indices and tables:
diff --git a/docs/source/pbcommand.cli.rst b/docs/source/pbcommand.cli.rst
new file mode 100644
index 0000000..ae2f6a5
--- /dev/null
+++ b/docs/source/pbcommand.cli.rst
@@ -0,0 +1,8 @@
+pbcommand Commandline Interface
+###############################
+
+Pacbio Commandline
+
+
+  .. automodule:: pbcommand.cli
+    :members: pacbio_args_runner, pacbio_args_or_contract_runner, pbparser_runner, get_default_argparser, get_default_argparser_with_base_opts
diff --git a/docs/source/pbcommand.engine.rst b/docs/source/pbcommand.engine.rst
new file mode 100644
index 0000000..d4c71e4
--- /dev/null
+++ b/docs/source/pbcommand.engine.rst
@@ -0,0 +1,9 @@
+pbcommand Engine
+################
+
+
+Util functions for calling external commands
+
+
+  .. automodule:: pbcommand.engine
+        :members: run_cmd, ExtCmdResult
diff --git a/docs/source/pbcommand.models.file_types.rst b/docs/source/pbcommand.models.file_types.rst
new file mode 100644
index 0000000..14a15a0
--- /dev/null
+++ b/docs/source/pbcommand.models.file_types.rst
@@ -0,0 +1,11 @@
+pbcommand Models
+################
+
+
+Pacbio Registered FileTypes
+
+
+  .. automodule:: pbcommand.models
+    :members: FileTypes
+    :undoc-members:
+    :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/pbcommand.models.rst b/docs/source/pbcommand.models.rst
new file mode 100644
index 0000000..1243ad2
--- /dev/null
+++ b/docs/source/pbcommand.models.rst
@@ -0,0 +1,11 @@
+pbcommand Models
+################
+
+
+Core Models
+
+
+  .. automodule:: pbcommand.models
+    :members: FileType, TaskOptionTypes, DataStore, DataStoreFile, PipelineChunk, SymbolTypes,ResourceTypes
+    :undoc-members:
+    :show-inheritance:
diff --git a/docs/source/pbcommand.models.rtc.rst b/docs/source/pbcommand.models.rtc.rst
new file mode 100644
index 0000000..2e3b734
--- /dev/null
+++ b/docs/source/pbcommand.models.rtc.rst
@@ -0,0 +1,11 @@
+pbcommand Resolved Tool Contract
+################################
+
+
+Resolved Tool Contract Models
+
+
+  .. automodule:: pbcommand.models
+    :members: ResolvedToolContractTask, ResolvedScatteredToolContractTask, ResolvedGatherToolContractTask, ResolvedToolContract
+    :undoc-members:
+    :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/pbcommand.models.task_options.rst b/docs/source/pbcommand.models.task_options.rst
new file mode 100644
index 0000000..52675a4
--- /dev/null
+++ b/docs/source/pbcommand.models.task_options.rst
@@ -0,0 +1,11 @@
+pbcommand Task Option Types
+###########################
+
+
+Supported Task Option Data models
+
+
+  .. automodule:: pbcommand.models
+    :members: BasePacBioOption, PacBioIntOption, PacBioBooleanOption,PacBioStringOption,PacBioFloatOption,PacBioIntChoiceOption,PacBioFloatChoiceOption,PacBioStringChoiceOption
+    :undoc-members:
+    :show-inheritance:
diff --git a/docs/source/pbcommand.models.tc.rst b/docs/source/pbcommand.models.tc.rst
new file mode 100644
index 0000000..54b7bb9
--- /dev/null
+++ b/docs/source/pbcommand.models.tc.rst
@@ -0,0 +1,11 @@
+pbcommand Tool Contract
+#######################
+
+
+Tool Contract Models
+
+
+  .. automodule:: pbcommand.models
+    :members: ToolContractTask, ToolDriver, InputFileType, OutputFileType, MalformedToolContractError, MalformedResolvedToolContractError, ToolContract, ScatterToolContractTask, GatherToolContractTask
+    :undoc-members:
+    :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/pbcommand.models.view_rules.rst b/docs/source/pbcommand.models.view_rules.rst
new file mode 100644
index 0000000..63cadfb
--- /dev/null
+++ b/docs/source/pbcommand.models.view_rules.rst
@@ -0,0 +1,11 @@
+pbcommand View Rules
+####################
+
+
+View Rule Models
+
+
+  .. automodule:: pbcommand.models
+    :members: DataStoreViewRule, PipelineDataStoreViewRules
+    :undoc-members:
+    :show-inheritance:
\ No newline at end of file
diff --git a/docs/source/pbcommand.pb_io.pipeline.rst b/docs/source/pbcommand.pb_io.pipeline.rst
new file mode 100644
index 0000000..ad41549
--- /dev/null
+++ b/docs/source/pbcommand.pb_io.pipeline.rst
@@ -0,0 +1,9 @@
+pbcommand IO Pipeline
+#####################
+
+
+IO utils for loading Tool Contract and Resolved Tool Contract
+
+
+  .. automodule:: pbcommand.pb_io
+        :members: load_tool_contract_from,load_resolved_tool_contract_from,load_pipeline_presets_from,write_resolved_tool_contract,write_tool_contract,write_resolved_tool_contract_avro,write_tool_contract_avro
diff --git a/docs/source/pbcommand.pb_io.tc.rst b/docs/source/pbcommand.pb_io.tc.rst
new file mode 100644
index 0000000..8615912
--- /dev/null
+++ b/docs/source/pbcommand.pb_io.tc.rst
@@ -0,0 +1,9 @@
+pbcommand IO Tool Contract and Resolved Tool Contract
+#####################################################
+
+
+IO utils for loading Tool Contract and Resolved Tool Contract
+
+
+  .. automodule:: pbcommand.pb_io
+        :members: load_tool_contract_from,load_resolved_tool_contract_from,load_pipeline_presets_from,write_resolved_tool_contract,write_tool_contract,write_resolved_tool_contract_avro,write_tool_contract_avro
diff --git a/docs/source/pbcommand.services.rst b/docs/source/pbcommand.services.rst
new file mode 100644
index 0000000..551e468
--- /dev/null
+++ b/docs/source/pbcommand.services.rst
@@ -0,0 +1,8 @@
+pbcommand Client to SMRT Link Services
+######################################
+
+Python client to SMRT Link Services
+
+
+  .. automodule:: pbcommand.services
+    :members: ServiceAccessLayer
\ No newline at end of file
diff --git a/docs/source/pbcommand.utils.rst b/docs/source/pbcommand.utils.rst
new file mode 100644
index 0000000..2d792e2
--- /dev/null
+++ b/docs/source/pbcommand.utils.rst
@@ -0,0 +1,9 @@
+pbcommand Utils
+###############
+
+
+Util functions
+
+
+  .. automodule:: pbcommand.utils
+        :members:
diff --git a/docs/source/report_model.rst b/docs/source/report_model.rst
new file mode 100644
index 0000000..0011ed9
--- /dev/null
+++ b/docs/source/report_model.rst
@@ -0,0 +1,192 @@
+Report Models
+=============
+
+A report is composed of model objects whose classes are defined in pbcommand.models.report. Typically, a report object is created and then attributes, tables, or plotGroups are
+added to the report. Lastly, the report is serialized as json to a file.
+
+The objects that comprise a report extend BaseReportElement. `All report elements have an id`.
+
+When the report is converted to a dictionary before serialization, each report element's id is prepended with its parent id,
+which has also been prepended. For example, given the nested elements of report --> plotGroup --> plot, with respective ids "r", "pg", and "p",
+the plot id would be "r.pg.p" in the dictionary.
+
+This allows elements of a report to be looked up by id, such as "mapping_stats.n_mapped_bases" for a report Attribute (i.e., metric), or a specific plot group, such as "filter_report.filtered_subreads".
+
+.. note:: Once a report element id has been assigned, it should not change.
+
+Report
+------
+
+Report is the root class of the model hierarchy. It's instantiated with an id (should be a short string), which defines its namespace. 
+This example shows how a report with one attribute, plotGroup, and table is created and written.
+
+.. code-block:: python
+
+    import os
+    import logging
+
+    from pbcommand.models.report import Report, Attribute, PlotGroup, Table
+
+    log = logging.getLogger(__name__)
+   
+    def make_report():
+        """Write a simple report"""
+        table = create_table() # See example below
+        attribute = create_attribute() # See example below
+        plotGroup = create_plotGroup() # See example below
+
+        # Id must match ^[a-z0-9_]+$
+        r = Report('loading', title="Loading Report",
+                attributes=[attribute],
+                plotgroups=[plotGroup],
+                tables=[table])
+
+        # Alternatively
+        r.add_table(table)
+        r.add_attribute(attribute)
+        r.add_plotGroup(plotGroup)
+
+        r.write_json('/my/file.json')
+            
+
+Attribute
+---------
+
+An attribute represents a key-value pair with an optional name. The id is the key. A report contains
+a list of attributes.
+
+.. code-block:: python
+
+    import os
+    import logging
+
+    from pbcommand.models.report import Attribute
+
+    log = logging.getLogger(__name__)
+   
+    def create_attribute():
+        """Return an attribute"""
+        a = Attribute('alpha', 1234, name='Alpha')
+        b = Attribute('beta', "value", name="Beta Display Name")
+        return a
+            
+
+Table
+-----
+
+A table contains a list of column objects and has an optional title and id. A report contains a list of tables.
+In general, the paradigm for creating a table is to instantiate a table and a series of columns. Add the 
+columns to the table in the desired order. Finally, iterate over your data set and append data to the
+columns by index.
+ 
+.. code-block:: python
+
+    import os
+    import logging
+    import random
+
+    from pbcommand.models.report import Attribute, Table, Column
+
+    log = logging.getLogger(__name__)
+   
+    def create_table():
+        """Return a table with 2 columns"""
+        columns = [Column( 'c1id', header='C1 header'),
+                Column('c2id', header='C2 header')]
+
+        t = Table('myid', title='My Table', columns=columns)
+
+        #Now append data to the columns
+        #Assume data is a list of tuples of len == 2
+        datum = [(c.id, random.random()) for c in columns]
+        for column_id, value in datum:
+            t.add_data_by_column_id(column_id, value)
+
+        # Alternatively
+        cx = Column("cx", header="X", values=[1,2,3,4])
+        cy = Column("cy", header="Y", values=[1,4,9,16])
+        t = Table("xy", title="X vs Y", columns=[cx, cy])
+        return t
+            
+        
+PlotGroup
+---------
+
+A `Plot Group` represents a logical grouping or collection of plots that convey related information, such as coverage across
+5 contigs. A plotGroup has an id, an optional thumbnail (to represent the group in SMRT Link in a
+preview), an optional legend and a list of plots.
+
+.. code-block:: python
+
+    import os
+    import logging
+
+    from pbcommand.models.report import PlotGroup, Plot
+
+    log = logging.getLogger(__name__)
+   
+    def create_plotGroup():
+        """Return a PlotGroup with 1 plot"""
+        # Image paths must be relative to the dir where the final Report is written
+
+        plot = Plot('plot_id', image='image.png', caption='this is a plot')
+        p = PlotGroup('myid', title='some title', thumbnail='image_thumb.png', plots=[plot])
+
+        return p
+            
+
+.. note:: The image paths must be written relative to where the report JSON file will be written.
+
+.. note:: Currently, only PNG is supported
+
+
+Report Specs
+============
+
+A parallel family of models in the same module handles specifications for
+individual reports, i.e. enumerating the data items expected for each model
+type, along with view metadata.  The overall structure and names of objects in
+the hierarchy is identical to the Report model.  For any of the nodes in the
+hierarchy, the following view metadata may be specified:
+
+  - a UI label, usually `title` (or `name` for Attributes, `header` for table
+    columns)
+  - a description suitable for formal documentation or mouseover text
+  - a boolean `isHidden` attribute that controls visibility
+
+There is some redundancy between the report specifications and the actual
+reports - for example the Report `title` and Attribute `name` occur in both
+models.  This was due to the lack of a clear model for view metadata in previous
+versions of SMRTAnalysis; the Report model may be slimmed down in the future as
+the view rules are deployed and utilized.
+
+The `pbcommand` module itself does not actually define any reports; currently
+most of these are part of the `pbreports` module.
+
+Format strings
+--------------
+
+For formatting numerical attribute and column values, we are using a
+lightweight syntax based on Python's `str.format(...)` method.  If the
+`format` attribute is set to `None` (`null` in JSON), the value should
+simply be directly converted to string without any formatting.  (In the case
+of string and boolean values, the format should always be left unset.)  For
+more complex operations, values must match this regular expression::
+
+  {([GMkp]{0,1})(:)([\.,]{0,1})([0-9]*)([dfg]{1})}(.*)$
+
+The `[GMkp]` group specifies scaling - if one of these characters is present,
+the value should be divided by one billion (`G`), one million (`M`), or one
+thousand (`k`) before formatting, or multiplied by 100 (`p`).  The period or
+comma after the colon modifies the display of floating-point and integer
+values respectively.  The following characters before the closing brace
+correspond to conventional format string syntax.  The format can optionally
+include a suffix to be appended to the formatted value.
+
+Examples of use::
+
+  format_value("{:,d}", 123456)           # 123,456
+  format_value("{:.2f}", 1.23456)         # 1.23
+  format_value("{G:.2f} Gb", 1234567890)  # 1.23 Gb
+  format_value("{p:5g}%", 0.987654321)    # 98.765%
+  format_value(None, 0.987654321)         # 0.987654321
diff --git a/docs/source/services.rst b/docs/source/services.rst
index 7032765..2f60832 100644
--- a/docs/source/services.rst
+++ b/docs/source/services.rst
@@ -2,8 +2,43 @@
 SMRT Service commandline interface
 ==================================
 
+A high level client to the SMRT Link Services is accessible from `ServiceAccessLayer` in `pbcommand.services`.
 
-.. argparse::
-   :module: pbcommand.services.cli
-   :func: get_parser
-   :prog: pbservice
\ No newline at end of file
+Client Layer
+~~~~~~~~~~~~
+
+Example:
+
+
+.. code-block:: python
+
+    In [1]: from pbcommand.services import ServiceAccessLayer
+
+    In [2]: s = ServiceAccessLayer("smrtlink-alpha", 8081)
+
+    In [3]: s.get_status()
+    Out[3]:
+    {u'id': u'smrtlink_analysis',
+     u'message': u'Services have been up for 141 hours, 37 minutes and 13.138 seconds.',
+     u'uptime': 509833138,
+     u'user': u'secondarytest',
+     u'uuid': u'12e1c62a-99a4-46c1-b616-a327dc38525f',
+     u'version': u'0.1.8-3a66e4a'}
+
+    In [4]: jobs = s.get_analysis_jobs()
+
+    In [5]: j = s.get_analysis_job_by_id(3)
+
+    In [6]: j.state, j.name
+    Out[6]: ('SUCCESSFUL', 'sirv_isoseq')
+
+    In [7]: import pbcommand; pbcommand.get_version()
+    Out[7]: '0.4.9'
+
+
+Commandline Tool Interface to Services
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. warning:: This has been migrated to scala in smrtflow_. Support for the python Client layer API will remain, however the python commandline tool is no longer installed by default and will be removed in a future version.
+
+.. _smrtflow: https://github.com/PacificBiosciences/smrtflow
diff --git a/pacbio-manifest.json b/pacbio-manifest.json
new file mode 100644
index 0000000..be751c7
--- /dev/null
+++ b/pacbio-manifest.json
@@ -0,0 +1,11 @@
+[
+  {
+    "id": "pbcommand",
+    "name": "pbcommand",
+    "_comment": "Need to add the first 6 chars of the GH SHA",
+    "version": "0.4.5-FIXME",
+    "description": "Python Common library for ToolContract Interface, Report, DataStore and SMRT Link Service Client",
+    "dependencies": [
+    ]
+  }
+]
diff --git a/pbcommand/__init__.py b/pbcommand/__init__.py
index 73486c9..bd0d1fa 100644
--- a/pbcommand/__init__.py
+++ b/pbcommand/__init__.py
@@ -1,4 +1,4 @@
-VERSION = (0, 3, 22)
+VERSION = (0, 5, 3)
 
 
 def get_version():
diff --git a/pbcommand/cli/core.py b/pbcommand/cli/core.py
index d6c5a24..762a0bb 100644
--- a/pbcommand/cli/core.py
+++ b/pbcommand/cli/core.py
@@ -192,11 +192,11 @@ def pacbio_args_or_contract_runner(argv,
 
     :param args_runner_func: func(args) => int signature
 
-    :param contract_tool_runner_func: func(tool_contract_instance) should be
-    the signature
+    :param contract_tool_runner_func: func(tool_contract_instance) should be the signature
 
     :param alog: a python log instance
     :param setup_log_func: func(log_instance) => void signature
+
     :return: int return code
     :rtype: int
     """
@@ -250,7 +250,7 @@ def pbparser_runner(argv,
 
     if EMIT_TOOL_CONTRACT_OPTION in argv:
         # print tool_contract
-        x = json.dumps(tool_contract.to_dict(), indent=4)
+        x = json.dumps(tool_contract.to_dict(), indent=4, separators=(',', ': '))
         print x
     else:
         return pacbio_args_or_contract_runner(argv, arg_parser, args_runner_func, contract_runner_func, alog, setup_log_func)
diff --git a/pbcommand/cli/examples/dev_app.py b/pbcommand/cli/examples/dev_app.py
old mode 100755
new mode 100644
diff --git a/pbcommand/cli/examples/dev_gather_fasta_app.py b/pbcommand/cli/examples/dev_gather_fasta_app.py
index 5c15a8b..a93e7f3 100644
--- a/pbcommand/cli/examples/dev_gather_fasta_app.py
+++ b/pbcommand/cli/examples/dev_gather_fasta_app.py
@@ -1,8 +1,5 @@
 """Example of Gather TC to gather several $chunk.fasta_id in chunk.json file.
 
-
-There's a bit of code here that is copied from pbsmrtpipe.tools.chunk_utils.
-Martin will eventually refactor this into pbcore.
 """
 import logging
 import sys
@@ -82,10 +79,8 @@ def run_main(chunked_json, output_fasta, chunk_key):
 
 def get_parser():
 
-    driver = "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract "
+    driver = "python -m pbcommand.cli.examples.dev_gather_fasta_app --resolved-tool-contract "
     desc = "Gather a fasta resources in a Chunk.json file"
-    # chunk keys that will be written to the file
-    chunk_key = "$chunk.fasta_id"
     p = get_gather_pbparser(TOOL_ID, __version__, "Fasta Chunk Gather",
                             desc, driver, is_distributed=False)
     p.add_input_file_type(FileTypes.CHUNK, "chunk_json", "Chunk JSON", "Chunked Fasta JSON Out")
diff --git a/pbcommand/cli/examples/dev_mixed_app.py b/pbcommand/cli/examples/dev_mixed_app.py
index 5078d03..76328ba 100644
--- a/pbcommand/cli/examples/dev_mixed_app.py
+++ b/pbcommand/cli/examples/dev_mixed_app.py
@@ -35,7 +35,7 @@ def _get_contract_parser():
     # Number of processors to use
     nproc = 2
     # Commandline exe to call "{exe}" /path/to/resolved-tool-contract.json
-    driver_exe = "python -m pbcommand.cli.example.dev_app --resolved-tool-contract "
+    driver_exe = "python -m pbcommand.cli.examples.dev_mixed_app --resolved-tool-contract "
     desc = "Dev app for Testing that supports emitting tool contracts"
     p = get_pbparser(TOOL_ID, __version__, "DevApp", desc, driver_exe,
                      is_distributed=False, nproc=nproc)
@@ -53,6 +53,12 @@ def add_rtc_options(p):
     p.add_input_file_type(FileTypes.CSV, "csv", "Input CSV", "Input csv description")
     p.add_output_file_type(FileTypes.REPORT, "rpt", "Output Report", "Output PacBio Report JSON", "example.report")
     p.add_int("pbcommand.task_options.alpha", "alpha", 25, "Alpha", "Alpha description")
+    p.add_float("pbcommand.task_options.beta", "beta", 1.234, "Beta", "Beta description")
+    p.add_boolean("pbcommand.task_options.gamma", "gamma", True, "Gamma", "Gamma description")
+    p.add_choice_str("pbcommand.task_options.ploidy", "ploidy", ["haploid", "diploid"], "Ploidy", "Genome ploidy", "haploid")
+    p.add_choice_int("pbcommand.task_options.delta", "delta", [1,2,3], "Delta", "An integer choice", default=1)
+    p.add_choice_float("pbcommand.task_options.epsilon", "epsilon", [0.01, 0.1, 1.0], "Epsilon", "A float choice", default=0.1)
+    p.add_str("pbcommand.task_options.comment", "comment", "asdf", "Comments", "A string parameter")
     return p
 
 
@@ -65,7 +71,7 @@ def add_argparse_only(p):
     :return:
     """
     p.add_argument("--output-h5", type=str, help="Optional output H5 file.")
-    p.add_argument("--beta", type=int, default=1234, help="Example option")
+    p.add_argument("--zeta", type=int, default=1234, help="Example option")
     return p
 
 
@@ -78,9 +84,12 @@ def get_contract_parser():
     return p
 
 
-def _fake_main(csv, report_json, alpha=1, beta=1234, output_h5=None):
-    _d = dict(c=csv, r=report_json, a=alpha, b=beta, h=output_h5)
-    log.info("Running main with {c} {r} alpha={a} beta={b} h5={h}".format(**_d))
+def _fake_main(csv, report_json, alpha=1, beta=1.234, gamma=True, delta=1, epsilon=1234,
+               output_h5=None, ploidy=None, zeta=None):
+    _d = dict(c=csv, r=report_json, a=alpha, b=beta, g=gamma, d=delta, e=epsilon, h=output_h5, p=ploidy)
+    log.info("Running main with {c} {r} alpha={a} beta={b} gamma={g} delta={d} epsilon={e} h5={h} p={p}".format(**_d))
+    with open(report_json, "w") as f:
+        f.write("{}")
     return 0
 
 
@@ -90,7 +99,7 @@ def args_runner(args):
     csv = args.csv
     report_json = args.rpt
     output_h5 = args.output_h5
-    return _fake_main(csv, report_json, alpha=args.alpha, beta=args.beta, output_h5=output_h5)
+    return _fake_main(csv, report_json, alpha=args.alpha, beta=args.beta, gamma=args.gamma, epsilon=args.epsilon, output_h5=output_h5, ploidy=args.ploidy, zeta=args.zeta)
 
 
 def resolved_tool_contract_runner(rtc):
@@ -103,7 +112,14 @@ def resolved_tool_contract_runner(rtc):
     csv = rtc.task.input_files[0]
     rpt = rtc.task.output_files[0]
     alpha = rtc.task.options["pbcommand.task_options.alpha"]
-    return _fake_main(csv, rpt, alpha=alpha)
+    beta = rtc.task.options["pbcommand.task_options.beta"]
+    gamma = rtc.task.options["pbcommand.task_options.gamma"]
+    ploidy = rtc.task.options["pbcommand.task_options.ploidy"]
+    delta = rtc.task.options["pbcommand.task_options.delta"]
+    epsilon = rtc.task.options["pbcommand.task_options.epsilon"]
+    comments = rtc.task.options["pbcommand.task_options.comment"]
+    return _fake_main(csv, rpt, alpha=alpha, beta=beta, gamma=gamma,
+                      ploidy=ploidy)
 
 
 def main(argv=sys.argv):
diff --git a/pbcommand/cli/examples/dev_quick_hello_world.py b/pbcommand/cli/examples/dev_quick_hello_world.py
index e0bf8a5..1bf2acb 100644
--- a/pbcommand/cli/examples/dev_quick_hello_world.py
+++ b/pbcommand/cli/examples/dev_quick_hello_world.py
@@ -1,4 +1,5 @@
 import sys
+import pprint
 import logging
 
 from pbcommand.models import FileTypes, OutputFileType
@@ -6,6 +7,9 @@ from pbcommand.cli import registry_builder, registry_runner, QuickOpt
 
 log = logging.getLogger(__name__)
 
+# Version of the Commandline Tool
+__version__ = "0.1.2"
+
 registry = registry_builder("pbcommand", "python -m pbcommand.cli.examples.dev_quick_hello_world ")
 
 
@@ -16,7 +20,10 @@ def _example_main(input_files, output_files, **kwargs):
     xs = output_files if isinstance(output_files, (list, tuple)) else [output_files]
     for x in xs:
         with open(x, 'w') as writer:
-            writer.write("Mock data\n")
+            if len(kwargs) > 0:
+                pprint.pprint(dict(kwargs), writer)
+            else:
+                writer.write("Mock data\n")
     return 0
 
 
@@ -27,7 +34,8 @@ def run_rtc(rtc):
 
 
 @registry("dev_fastq2fasta", "0.1.0", FileTypes.FASTQ, FileTypes.FASTA,
-          options=dict(beta=QuickOpt(1234.0, "Beta Name", "Beta Description"), gamma=True))
+          options=dict(beta=QuickOpt(1234.0, "Beta Name", "Beta Description"), gamma=True),
+          name="Fastq to Fasta", description="Dev Task Fastq to Fasta Example")
 def run_rtc(rtc):
     return _example_main(rtc.task.input_files[0], rtc.task.output_files[0])
 
@@ -37,6 +45,14 @@ def run_rtc(rtc):
     return _example_main(rtc.task.input_files, rtc.task.output_files)
 
 
+ at registry("dev_test_options", "0.1.0", FileTypes.TXT, FileTypes.TXT,
+          nproc=1,
+          options=dict(alpha=1234, beta=5.4321, gamma=True, ploidy=("haploid", "diploid"), delta=(1,2,3), epsilon=(0.01,0.1,1.0)))
+def run_rtc(rtc):
+    log.debug("Dev Quick Hello World Example with various option types")
+    return _example_main(rtc.task.input_files[0], rtc.task.output_files[0], options=rtc.task.options)
+
+
 def _to_output(i, file_type):
     default_name = "_".join([file_type.file_type_id, file_type.base_name + "_" + str(i)])
     label = "label_" + file_type.file_type_id
@@ -48,7 +64,7 @@ def _to_outputs(file_types):
     return [_to_output(i, ft) for i, ft in enumerate(file_types)]
 
 
- at registry("dev_txt_custom_outs", "0.1.0", FileTypes.TXT, _to_outputs((FileTypes.TXT, FileTypes.TXT)))
+ at registry("dev_txt_custom_outs", "0.1.0", FileTypes.TXT, _to_outputs((FileTypes.TXT, FileTypes.TXT)), name="Custom Txt Task")
 def run_rtc(rtc):
     """Test for using OutputFileTypes as outputs
 
@@ -61,4 +77,5 @@ if __name__ == '__main__':
     default_log_level = logging.DEBUG
     sys.exit(registry_runner(registry,
                              sys.argv[1:],
-                             default_log_level=default_log_level))
+                             default_log_level=default_log_level,
+                             version=__version__))
diff --git a/pbcommand/cli/examples/dev_scatter_fasta_app.py b/pbcommand/cli/examples/dev_scatter_fasta_app.py
index 7479956..c389740 100644
--- a/pbcommand/cli/examples/dev_scatter_fasta_app.py
+++ b/pbcommand/cli/examples/dev_scatter_fasta_app.py
@@ -1,15 +1,4 @@
-"""Example of Generating a Chunk.json file that 'scatters' a pair of fasta files
-
-
-In the example, the first fasta file is chunked, while the path to the second
-fasta file is passed directly.
-
-It generates a fasta_1_id and fasta_2_id chunk keys,
-
-There's a bit of code here that is copied from pbsmrtpipe.tools.chunk_utils.
-
-Martin will eventually refactor this into pbcore.
-"""
+"""Example of Generating a Chunk.json file that 'scatters' a pair of fasta files"""
 import os
 import logging
 import sys
@@ -127,8 +116,8 @@ def get_parser():
 
     driver = "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract "
     desc = "Scatter a single fasta file to create chunk.json file"
-    # chunk keys that will be written to the file
-    chunk_keys = ("$chunk.fasta_id", )
+    # chunk keys that **will** be written to the file
+    chunk_keys = (Constants.FA_CHUNK_KEY, )
     p = get_scatter_pbparser(TOOL_ID, __version__, "Fasta Scatter",
                              desc, driver, chunk_keys, is_distributed=False)
     p.add_input_file_type(FileTypes.FASTA, "fasta_in", "Fasta In", "Fasta file to scatter")
diff --git a/pbcommand/cli/examples/dev_simple_app.py b/pbcommand/cli/examples/dev_simple_app.py
old mode 100755
new mode 100644
diff --git a/pbcommand/cli/quick.py b/pbcommand/cli/quick.py
index 52ae47d..1ad85f5 100644
--- a/pbcommand/cli/quick.py
+++ b/pbcommand/cli/quick.py
@@ -12,8 +12,11 @@ from .core import get_default_argparser_with_base_opts
 from pbcommand.common_options import add_base_options, add_common_options
 
 from pbcommand.models import (ToolContractTask, ToolContract,
-                              InputFileType, OutputFileType, FileType)
-from pbcommand.models.parser import (to_option_schema, JsonSchemaTypes)
+                              InputFileType, OutputFileType, FileType,
+                              PacBioIntChoiceOption, PacBioStringOption,
+                              PacBioFloatOption, PacBioBooleanOption,
+                              PacBioIntOption,
+                              PacBioStringChoiceOption, PacBioFloatChoiceOption)
 from pbcommand.models.tool_contract import ToolDriver
 from pbcommand.pb_io import (load_resolved_tool_contract_from,
                              write_tool_contract)
@@ -53,27 +56,55 @@ def _file_type_to_output_file_type(file_type, index):
                           file_type.default_name)
 
 
-def __convert_to_option(jtype, namespace, key, value, name=None, description=None):
-    """Convert to Option dict
+def __convert_to_choice_option(option_id, default_value_or_choices, name, description, choices=None):
+    """Enable some looseness in the inputs
+
+    if the default_value is provided by a list or tuple, assume the default value is the first
+    value.
+
+    Else, assume the choices and default value was provided
 
-    This really should have been a concrete type, at least a namedtuple
     """
-    opt_id = ".".join([namespace, 'task_options', key])
-    name = "Option {n}".format(n=key) if name is None else name
-    desc = "Option {n} description".format(n=key) if description is None else description
-    opt = to_option_schema(opt_id, jtype, name, desc, value)
-    return opt
+    # FIXME, this method is somewhat duplicated with the from dict serialization IO layer
+
+    def _is_list(x):
+        return isinstance(x, (tuple, list))
 
+    if _is_list(default_value_or_choices):
+        value = default_value_or_choices[0]
+        r_choices = default_value_or_choices
+    else:
+        value = default_value_or_choices
+        r_choices = choices
 
-def _convert_to_option(namespace, key, value, name=None, description=None):
     if isinstance(value, basestring):
-        opt = __convert_to_option(JsonSchemaTypes.STR, namespace, key, value, name=name, description=description)
+        opt = PacBioStringChoiceOption(option_id, name, value, description, r_choices)
+    elif isinstance(value, int):
+        opt = PacBioIntChoiceOption(option_id, name, value, description, r_choices)
+    elif isinstance(value, float):
+        opt = PacBioFloatChoiceOption(option_id, name, value, description, r_choices)
+    else:
+        raise TypeError("Invalid choice type {t} of default:{d} and choices: {c}".format(t=type(value), d=default_value_or_choices, c=choices))
+
+    return opt
+
+
+def _convert_to_option(namespace, key, value, name=None, description=None, choices=None):
+
+    opt_id = ".".join([namespace, 'task_options', key])
+    r_name = "Option {n}".format(n=key) if name is None else name
+    r_desc = "Option {n} description".format(n=key) if description is None else description
+
+    if isinstance(value, (tuple, list)) or isinstance(choices, (tuple, list)):
+        opt = __convert_to_choice_option(opt_id, value, r_name, r_desc, choices=choices)
+    elif isinstance(value, basestring):
+        opt = PacBioStringOption(opt_id, r_name, value, r_desc)
     elif isinstance(value, bool):
-        opt = __convert_to_option(JsonSchemaTypes.BOOL, namespace, key, value, name=name, description=description)
+        opt = PacBioBooleanOption(opt_id, r_name, value, r_desc)
     elif isinstance(value, int):
-        opt = __convert_to_option(JsonSchemaTypes.INT, namespace, key, value, name=name, description=description)
+        opt = PacBioIntOption(opt_id, r_name, value, r_desc)
     elif isinstance(value, float):
-        opt = __convert_to_option(JsonSchemaTypes.NUM, namespace, key, value, name=name, description=description)
+        opt = PacBioFloatOption(opt_id, r_name, value, r_desc)
     else:
         raise TypeError("Unsupported option {k} type. {t} ".format(k=key, t=type(value)))
 
@@ -124,7 +155,7 @@ class Registry(object):
                   d=self.driver_base, t=len(self.rtc_runners))
         return "<{k} {n} {d} tool-contracts:{t} >".format(**_d)
 
-    def __call__(self, tool_id, version, input_types, output_types, options=None, nproc=1, is_distributed=True):
+    def __call__(self, tool_id, version, input_types, output_types, options=None, nproc=1, is_distributed=True, name=None, description=None):
         def _w(func):
             """
 
@@ -137,8 +168,12 @@ class Registry(object):
             otypes = _to_list(output_types)
 
             global_id = ".".join([self.namespace, 'tasks', tool_id])
-            name = "Tool {n}".format(n=tool_id)
-            desc = "Quick tool {n} {g}".format(n=tool_id, g=global_id)
+
+            def _or_default(value_, default_value):
+                return default_value if value_ is None else value_
+
+            display_name = _or_default(name, "Tool {n}".format(n=tool_id))
+            desc = _or_default(description, "Quick tool {n} {g}".format(n=tool_id, g=global_id))
 
             input_file_types = [_file_type_to_input_file_type(ft, i) for i, ft in enumerate(itypes)]
             output_file_types = [_transform_output_ftype(ft, i) for i, ft in enumerate(otypes)]
@@ -149,7 +184,7 @@ class Registry(object):
                 tool_options = [_convert_to_raw_option(self.namespace, key, value) for key, value in options.iteritems()]
 
             resource_types = []
-            task = ToolContractTask(global_id, name, desc, version, is_distributed,
+            task = ToolContractTask(global_id, display_name, desc, version, is_distributed,
                                     input_file_types, output_file_types, tool_options, nproc, resource_types)
             # trailing space if for 'my-tool --resolved-tool-contract ' /path/to/rtc.json
             driver_exe = " ".join([self.driver_base, Constants.RTC_DRIVER, " "])
@@ -262,7 +297,7 @@ def __args_emit_tc_runner(registry):
             sys.stderr.write("ERROR. Unable to find tool-contract id {i}".format(i=tc_id))
             return -1
         else:
-            print json.dumps(tc.to_dict(), sort_keys=True, indent=4)
+            print json.dumps(tc.to_dict(), sort_keys=True, indent=4, separators=(',', ': '))
             return 0
     return _w
 
@@ -298,9 +333,11 @@ def _to_registry_parser(version, description, default_log_level):
     return _f
 
 
-def registry_runner(registry, argv, default_log_level=logging.INFO):
+def registry_runner(registry, argv, default_log_level=logging.INFO, version="0.1.0"):
     """Runs a registry
 
+    :arg version: Is the version of the Commandline tool, not the TCs or tasks.
+
     1. Manually build an argparser that has
 
     For running:
@@ -314,7 +351,7 @@ def registry_runner(registry, argv, default_log_level=logging.INFO):
 
     :type registry: Registry
     """
-    f = _to_registry_parser('0.1.1', "Multi-quick-tool-runner for {r}".format(r=registry.namespace), default_log_level)
+    f = _to_registry_parser(version, "Multi-quick-tool-runner for {r}".format(r=registry.namespace), default_log_level)
     p = f(registry)
     args = p.parse_args(argv)
     # The logger needs to be setup only in specific subparsers. Some commands
diff --git a/pbcommand/cli/utils.py b/pbcommand/cli/utils.py
old mode 100755
new mode 100644
diff --git a/pbcommand/engine/runner.py b/pbcommand/engine/runner.py
index 4af52a4..2d6f454 100644
--- a/pbcommand/engine/runner.py
+++ b/pbcommand/engine/runner.py
@@ -13,7 +13,7 @@ log = logging.getLogger(__name__)
 ExtCmdResult = namedtuple("ExtCmdResult", "exit_code cmd run_time")
 
 
-def run_cmd(cmd, stdout_fh, stderr_fh, shell=True, time_out=None):
+def run_cmd(cmd, stdout_fh, stderr_fh, shell=True, time_out=None, env=None, executable="/bin/bash"):
     """Run external command
 
 
@@ -43,7 +43,10 @@ def run_cmd(cmd, stdout_fh, stderr_fh, shell=True, time_out=None):
 
     hostname = platform.node()
     log.debug("calling cmd '{c}' on {h}".format(c=cmd, h=hostname))
-    process = subprocess.Popen(cmd, stderr=stderr_fh, stdout=stdout_fh, shell=shell)
+    process = subprocess.Popen(cmd, stderr=stderr_fh, stdout=stdout_fh,
+                               shell=shell,
+                               executable=executable,
+                               env=env)
 
     # This needs a better dynamic model
     max_sleep_time = 5
diff --git a/pbcommand/interactive_resolver.py b/pbcommand/interactive_resolver.py
index 4c6a076..eaacafa 100644
--- a/pbcommand/interactive_resolver.py
+++ b/pbcommand/interactive_resolver.py
@@ -7,7 +7,8 @@ import warnings
 from pbcommand.cli import get_default_argparser
 from pbcommand.models import SymbolTypes
 from pbcommand.pb_io import (load_tool_contract_from,
-                             write_resolved_tool_contract)
+                             write_resolved_tool_contract,
+                             write_resolved_tool_contract_avro)
 
 from pbcommand.resolver import resolve_tool_contract
 
@@ -37,17 +38,35 @@ def run_main(tc):
         in_path = get_input(" {i} file {p} path :".format(i=i, p=input_type))
         if not os.path.exists(in_path):
             warnings.warn("Unable to find {p}".format(p=in_path))
-        input_files.append(in_path)
+
+        # Make sure all inputs are abspaths
+        p = in_path if os.path.isabs(in_path) else os.path.abspath(in_path)
+        input_files.append(p)
 
     tool_options = {}
     rtc = resolve_tool_contract(tc, input_files, output_dir, '/tmp', int(nproc), tool_options, is_distributable=False)
     print rtc
 
-    file_name = tc.task.task_id + "_resolved_tool_contract.json"
-    rtc_path = os.path.join(output_dir, file_name)
+    def to_n(ext):
+        return "resolved_tool_contract." + ext
+
+    def to_f(ext):
+        return "_".join([tc.task.task_id, to_n(ext)])
+
+    def to_p(ext):
+        return os.path.join(output_dir, to_f(ext))
+
+    rtc_path = to_p("json")
     print "writing RTC to {f}".format(f=rtc_path)
+
+    # Always write the JSON RTC file
     write_resolved_tool_contract(rtc, rtc_path)
 
+    if rtc.driver.serialization.lower() == "avro":
+        avro_rtc_path = to_p("avro")
+        print "writing AVRO RTC to {f}".format(f=avro_rtc_path)
+        write_resolved_tool_contract_avro(rtc, avro_rtc_path)
+
     return rtc
 
 
diff --git a/pbcommand/models/__init__.py b/pbcommand/models/__init__.py
index 55bcc28..7b60ee8 100644
--- a/pbcommand/models/__init__.py
+++ b/pbcommand/models/__init__.py
@@ -1,8 +1,16 @@
-from .common import (FileType, FileTypes,
+from .common import (FileType, FileTypes, TaskOptionTypes,
                      DataSetFileType, DataSetMetaData,
                      TaskTypes, ResourceTypes, SymbolTypes,
-                     PipelineChunk, DataStoreFile, DataStore)
+                     PipelineChunk, DataStoreFile, DataStore,
+                     DataStoreViewRule, PipelineDataStoreViewRules,
+                     BasePacBioOption,
+                     PacBioIntOption, PacBioBooleanOption,
+                     PacBioStringOption, PacBioFloatOption,
+                     PacBioIntChoiceOption,
+                     PacBioFloatChoiceOption, PacBioStringChoiceOption)
 from .tool_contract import *
 from .parser import (get_pbparser,
                      get_gather_pbparser,
                      get_scatter_pbparser, PbParser)
+
+from .conditions import (ReseqCondition, ReseqConditions)
diff --git a/pbcommand/models/common.py b/pbcommand/models/common.py
index c21901e..3acfb9b 100644
--- a/pbcommand/models/common.py
+++ b/pbcommand/models/common.py
@@ -9,6 +9,7 @@ import json
 import logging
 import os
 import re
+import types
 import warnings
 import functools
 import datetime
@@ -45,6 +46,8 @@ class PacBioNamespaces(object):
     PBSMRTPIPE_CONSTANTS_PREFIX = 'pbsmrtpipe.constants'
     # Pipelines
     PBSMRTPIPE_PIPELINES = "pbsmrtpipe.pipelines"
+    # Option Types
+    PBSMRTPIPE_OPTS_TYPE = "pbsmrtpipe.option_types"
 
 
 def __to_type(prefix, name):
@@ -59,6 +62,7 @@ to_task_types_ns = functools.partial(__to_type, PacBioNamespaces.PB_TASK_TYPES)
 to_workflow_option_ns = functools.partial(__to_type, PacBioNamespaces.PBSMRTPIPE_OPTS_PREFIX)
 to_pipeline_ns = functools.partial(__to_type, PacBioNamespaces.PBSMRTPIPE_PIPELINES)
 to_index_ns = functools.partial(__to_type, PacBioNamespaces.PB_INDEX)
+to_opt_type_ns = functools.partial(__to_type, PacBioNamespaces.PBSMRTPIPE_OPTS_TYPE)
 
 
 class TaskTypes(object):
@@ -69,10 +73,90 @@ class TaskTypes(object):
     GATHERED = to_task_types_ns("gathered")
 
 
-class SymbolTypes(object):
+class TaskOptionTypes(object):
+    """Core Task Option type id type"""
+    # FIXME(mpkocher)(2016-7-16) This should be more well defined, e.g., int32 and use the same id format of
+    # For example, pacbio.option_types.int32
+
+    # Because of the Avro schema restrictions and to keep the keys short
+    # in name, we'll use a dot let format. The legacy format used
+    # pbsmrtpipe.option_types.* as the root namespace
+    INT = "integer"
+    BOOL = "boolean"
+    STR = "string"
+    FLOAT = "float"
+    # Choice type Options
+    CHOICE_STR = "choice_string"
+    CHOICE_INT = "choice_integer"
+    CHOICE_FLOAT = "choice_float"
+
+    @classmethod
+    def ALL(cls):
+        """Return a set of all Task Option Types"""
+        return {cls.INT, cls.BOOL, cls.STR, cls.FLOAT, cls.CHOICE_STR,
+                cls.CHOICE_INT, cls.CHOICE_FLOAT}
+
+    @classmethod
+    def _raise_value_error(cls, value, allowed, option_type_name):
+        raise ValueError("Incompatible task {o} option type id '{s}'. "
+                         "Allowed values {v}".format(o=option_type_name,
+                                                     s=value,
+                                                     v=",".join(allowed)))
+
+    @classmethod
+    def ALL_SIMPLE(cls):
+        """Returns a set of 'simple' task option types (e.g., boolean, string, int, float)"""
+        return {cls.STR, cls.BOOL, cls.INT, cls.FLOAT}
+
+    @classmethod
+    def from_simple_str(cls, sx):
+        """Validates a string is a valid task option type id or raises ValueError
+
+        :raises ValueError
+        """
+        if sx in cls.ALL_SIMPLE():
+            return sx
+        else:
+            cls._raise_value_error(sx, cls.ALL_SIMPLE(), "simple")
+
+    @classmethod
+    def ALL_CHOICES(cls):
+        """Returns a set of choice task option types"""
+        return {cls.CHOICE_INT, cls.CHOICE_FLOAT, cls.CHOICE_STR}
+
+    @classmethod
+    def is_choice(cls, sx):
+        return sx in cls.ALL_CHOICES()
+
+    @classmethod
+    def from_choice_str(cls, sx):
+        """Validates and returns a task choice option type or raises ValueError"""
+        if sx in cls.ALL_CHOICES():
+            return sx
+        else:
+            cls._raise_value_error(sx, cls.ALL_CHOICES(), "choice")
+
+    @classmethod
+    def from_str(cls, sx):
+        """Validates and returns a valid task option type id or raises ValueError.
 
-    """*Symbols* that are understood during resolving, such as max number of
-    processors, Max Chunks"""
+        :note: For legacy reasons, "number" will be mapped to "float"
+        """
+        # FIXME, Legacy fix, "number" appears to mean "float"?
+        if sx == "number":
+            sx = TaskOptionTypes.FLOAT
+
+        if sx in TaskOptionTypes.ALL():
+            return sx
+        else:
+            cls._raise_value_error(sx, cls.ALL(), "")
+
+
+class SymbolTypes(object):
+    """
+    *Symbols* that are understood during resolving, such as max number of
+    processors, Max Chunks. Used when defining a Tool Contract
+    """
     MAX_NPROC = '$max_nproc'
     MAX_NCHUNKS = '$max_nchunks'
     TASK_TYPE = '$task_type'
@@ -84,15 +168,17 @@ class SymbolTypes(object):
 
 
 class ResourceTypes(object):
-
-    """Resources such as tmp dirs and files, log files"""
+    """
+    Resources such as tmp dirs and files, log files. Used when defining
+    a Tool Contract
+    """
     TMP_DIR = '$tmpdir'
     TMP_FILE = '$tmpfile'
     LOG_FILE = '$logfile'
     # tasks can write output to this directory
     OUTPUT_DIR = '$outputdir'
     # Not sure this is a good idea
-    #TASK_DIR = '$taskdir'
+    # TASK_DIR = '$taskdir'
 
     @classmethod
     def ALL(cls):
@@ -144,6 +230,15 @@ class FileType(object):
     __metaclass__ = _RegisteredFileType
 
     def __init__(self, file_type_id, base_name, ext, mime_type):
+        """
+        Core File Type data model
+
+        :param file_type_id: unique file string
+        :param base_name: default base name of the file (without extension)
+        :param ext: file extension
+        :param mime_type:  file mimetype
+        :return:
+        """
         self.file_type_id = file_type_id
         self.base_name = base_name
         self.ext = ext
@@ -154,6 +249,7 @@ class FileType(object):
 
     @property
     def default_name(self):
+        """ Default name of file alias for base_name"""
         return self.base_name # ".".join([self.base_name, self.ext])
 
     def __eq__(self, other):
@@ -180,12 +276,15 @@ class DataSetFileType(FileType):
 
 
 class MimeTypes(object):
+    """Supported Mime types"""
     JSON = 'application/json'
     TXT = 'text/plain'
     CSV = 'text/csv'
     XML = 'application/xml'
     BINARY = 'application/octet-stream'
     PICKLE = 'application/python-pickle'
+    GZIP = 'application/x-gzip'
+    ZIP = 'application/zip'
 
 
 class FileTypes(object):
@@ -236,6 +335,7 @@ class FileTypes(object):
     SAM = FileType(to_file_ns('sam'), "alignments", "sam", MimeTypes.BINARY)
     VCF = FileType(to_file_ns('vcf'), "file", "vcf", MimeTypes.TXT)
     GFF = FileType(to_file_ns('gff'), "file", "gff", MimeTypes.TXT)
+    BIGWIG = FileType(to_file_ns('bigwig'), "annotations", "bw", MimeTypes.BINARY)
     CSV = FileType(to_file_ns('csv'), "file", "csv", MimeTypes.CSV)
     XML = FileType(to_file_ns('xml'), "file", "xml", 'application/xml')
     # Generic Json File
@@ -244,6 +344,9 @@ class FileTypes(object):
     H5 = FileType(to_file_ns("h5"), "file", "h5", MimeTypes.BINARY)
     # Generic Python pickle XXX EVIL
     PICKLE = FileType(to_file_ns("pickle"), "file", "pickle", MimeTypes.PICKLE)
+    # GZIPped archive
+    GZIP = FileType(to_file_ns("gzip"), "file", "gz", MimeTypes.GZIP)
+    ZIP = FileType(to_file_ns("zip"), "file", "zip", MimeTypes.ZIP)
 
     # ******************* NEW SA3 File Types ********************
     # DataSet Types. The default file names should have well-defined agreed
@@ -258,6 +361,8 @@ class FileTypes(object):
     DS_BARCODE = DataSetFileType(to_ds_ns("BarcodeSet"), "file", "barcodeset.xml", MimeTypes.XML)
     DS_ALIGN_CCS = DataSetFileType(to_ds_ns("ConsensusAlignmentSet"), "file",
                                    "consensusalignmentset.xml", MimeTypes.XML)
+    DS_GMAP_REF = DataSetFileType(to_ds_ns("GmapReferenceSet"), "file",
+                                  "gmapreferenceset.xml", MimeTypes.XML)
 
     # PacBio Defined Formats
     # **** Index Files
@@ -281,6 +386,10 @@ class FileTypes(object):
     FASTA_REF = FileType("PacBio.ReferenceFile.ReferenceFastaFile", "file", "pbreference.fasta", MimeTypes.TXT)
     CONTIG_FA = FileType("PacBio.ContigFile.ContigFastaFile", "file", "contig.fasta", MimeTypes.TXT)
 
+    # Adapter Fasta File From PPA
+    FASTA_ADAPTER = FileType("PacBio.SubreadFile.AdapterFastaFile", "file", "adapters.fasta", MimeTypes.TXT)
+    FASTA_CONTROL = FileType("PacBio.SubreadFile.ControlFastaFile", "file", "control.fasta", MimeTypes.TXT)
+
     # BAM dialects
     BAM_ALN = FileType("PacBio.AlignmentFile.AlignmentBamFile", "file", "alignment.bam", MimeTypes.BINARY)
     BAM_SUB = FileType("PacBio.SubreadFile.SubreadBamFile", "file", "subread.bam", MimeTypes.BINARY)
@@ -296,9 +405,10 @@ class FileTypes(object):
 
     # sts.xml
     STS_XML = FileType("PacBio.SubreadFile.ChipStatsFile", "file", "sts.xml", MimeTypes.XML)
+    STS_H5 = FileType("PacBio.SubreadFile.ChipStatsH5File", "file", "sts.h5", MimeTypes.BINARY)
 
-    # THIS IS EXPERIMENT for internal analysis. DO NOT use
-    COND = FileType(to_file_ns("COND"), "file", "conditions.json", MimeTypes.JSON)
+    # Resequencing Conditions File Format
+    COND_RESEQ = FileType(to_file_ns("COND_RESEQ"), "file", "conditions-reseq.json", MimeTypes.JSON)
 
     @staticmethod
     def is_valid_id(file_type_id):
@@ -324,6 +434,18 @@ class DataStoreFile(object):
 
     def __init__(self, uuid, source_id, type_id, path, is_chunked=False,
                  name="", description=""):
+        """
+
+        :param uuid: UUID of the datastore file
+        :param source_id: source id of the DataStore file
+        :param type_id: File Type id of the datastore file
+        :param path: Absolute path to the datastore file
+        :param is_chunked: is the datastore file a "chunked" file from a scatter/chunking task
+        :param name: Display name of datastore file
+        :param description: Description of the datastore file
+
+        """
+
         # adding this for consistency. In the scala code, the unique id must be
         # a uuid format
         self.uuid = uuid
@@ -385,6 +507,8 @@ class DataStore(object):
 
     def __init__(self, ds_files, created_at=None):
         """
+        :param ds_files: list of datastore file instances
+        :param created_at: Date the datastore was created. if None, will use the current datetime
 
         :type ds_files: list[DataStoreFile]
         """
@@ -410,26 +534,22 @@ class DataStore(object):
                   updatedAt=_datetime_to_string(self.updated_at), files=fs)
         return _d
 
-    def _write_json(self, file_name, permission):
-        with open(file_name, permission) as f:
-            s = json.dumps(self.to_dict(), indent=4, sort_keys=True)
-            f.write(s)
-
     def write_json(self, file_name):
-        # if the file exists is should raise?
-        self._write_json(file_name, 'w')
+        write_dict_to_json(self.to_dict(), file_name, "w")
 
     def write_update_json(self, file_name):
         """Overwrite Datastore with current state"""
-        self._write_json(file_name, 'w+')
+        write_dict_to_json(self.to_dict(), file_name, "w+")
 
     @staticmethod
     def load_from_d(d):
+        """Load DataStore from a dict"""
         ds_files = [DataStoreFile.from_dict(x) for x in d['files']]
         return DataStore(ds_files)
 
     @staticmethod
     def load_from_json(path):
+        """Load DataStore from a JSON file"""
         with open(path, 'r') as reader:
             d = json.loads(reader.read())
         return DataStore.load_from_d(d)
@@ -507,3 +627,332 @@ class PipelineChunk(object):
 
     def to_dict(self):
         return {'chunk_id': self.chunk_id, 'chunk': self._datum}
+
+
+class DataStoreViewRule(object):
+    """
+    Rule specifying if and how the UI should display a datastore file.
+    """
+    def __init__(self, source_id, file_type_id, is_hidden, name="",
+                 description=""):
+        """
+        :param source_id: Unique source id of the datastore file
+        :param file_type_id: File Type id of the datastore file
+        :param is_hidden: Mark the file as hidden
+        :param name: Display name of the file
+        :param description: Description of the file
+        """
+
+        # for generating rules compositionally in Python, it's easier to just
+        # pass the FileType object directly
+        if isinstance(file_type_id, FileType):
+            file_type_id = file_type_id.file_type_id
+        assert FileTypes.is_valid_id(file_type_id), file_type_id
+        self.source_id = source_id
+        self.file_type_id = file_type_id
+        self.is_hidden = is_hidden
+        self.name = name
+        self.description = description
+
+    def to_dict(self):
+        return {"sourceId": self.source_id, "fileTypeId": self.file_type_id,
+                "isHidden": self.is_hidden, "name": self.name,
+                "description": self.description}
+
+    @staticmethod
+    def from_dict(d):
+        return DataStoreViewRule(d['sourceId'], d['fileTypeId'], d['isHidden'],
+                                 d.get('name', ''), d.get('description', ''))
+
+
+class PipelineDataStoreViewRules(object):
+    """
+    A collection of DataStoreViewRule objects associated with a pipeline.
+    """
+
+    def __init__(self, pipeline_id, smrtlink_version, rules=()):
+        self.pipeline_id = pipeline_id
+        self.smrtlink_version = smrtlink_version
+        self.rules = list(rules)
+
+    def to_dict(self):
+        return {"pipelineId": self.pipeline_id,
+                "smrtlinkVersion": self.smrtlink_version,
+                "rules": [r.to_dict() for r in self.rules]}
+
+    @staticmethod
+    def from_dict(d):
+        return PipelineDataStoreViewRules(
+            pipeline_id=d['pipelineId'],
+            smrtlink_version=d['smrtlinkVersion'],
+            rules=[DataStoreViewRule.from_dict(r) for r in d['rules']])
+
+    @staticmethod
+    def load_from_json(path):
+        with open(path, 'r') as reader:
+            d = json.loads(reader.read())
+        return PipelineDataStoreViewRules.from_dict(d)
+
+    def write_json(self, file_name):
+        write_dict_to_json(self.to_dict(), file_name)
+
+
+def write_dict_to_json(d, file_name, permission="w"):
+    with open(file_name, permission) as f:
+        s = json.dumps(d, indent=4, sort_keys=True,
+                       separators=(',', ': '))
+        f.write(s)
+
+
+RX_TASK_ID = re.compile(r'^([A-z0-9_]*)\.tasks\.([A-z0-9_]*)$')
+RX_TASK_OPTION_ID = re.compile(r'^([A-z0-9_]*)\.task_options\.([A-z0-9_\.]*)')
+
+
+def _validate_id(prog, idtype, tid):
+    if prog.match(tid):
+        return tid
+    else:
+        raise ValueError("Invalid format {t}: '{i}' {p}".format(t=idtype, i=tid, p=repr(prog.pattern)))
+
+validate_task_id = functools.partial(_validate_id, RX_TASK_ID, 'task id')
+validate_task_option_id = functools.partial(_validate_id, RX_TASK_OPTION_ID,
+                                            'task option id')
+
+
+class BasePacBioOption(object):
+    # This is an abstract class. This is really blurring the abstract with
+    # implementation which makes the interface unclear.
+
+    # This MUST be a valid TaskOptionTypes.* value.
+    OPTION_TYPE_ID = "UNKNOWN"
+
+    @classmethod
+    def validate_core_type(cls, value):
+        """
+        Every Option has a "core" type that needs to validated in the
+        constructor. The function should return the value
+
+        Subclasses should implement
+
+        :param value: Option value
+        :return: validated value
+        """
+
+        raise NotImplementedError
+
+    def validate_option(self, value):
+        """Core method used externally (e.g., resolvers) to validate option
+
+        The default implementation will only validate that the "core" type
+        is consistent with definition.
+
+        Subclasses should override this to leverage internal state (e.g, self.choices)
+        """
+        return self.validate_core_type(value)
+
+    def __init__(self, option_id, name, default, description):
+        """
+        Core constructor for the PacBio Task Option.
+
+        :param option_id: PacBio Task Option type id. Must adhere to the A-z0-9_
+        :param name: Display name of the Task Option
+        :param default: Default value
+        :param description: Description of the Task Option
+
+        :type option_id: str
+        :type name: str
+        :type description: str
+        """
+        self.option_id = validate_task_option_id(option_id)
+        self.name = name
+        self._default = self.validate_core_type(default)
+        self.description = description
+
+        # make sure subclasses have overwritten the OPTION_TYPE_ID.
+        # this will raise
+        if self.OPTION_TYPE_ID not in TaskOptionTypes.ALL():
+            msg = "InValid Task Option type id {t} Subclasses of {c} must " \
+                  "override OPTION_TYPE_ID to have a consistent value with " \
+                  "TaskOptionTypes.*".format(t=self.OPTION_TYPE_ID, c=self.__class__.__name__)
+            raise ValueError(msg)
+
+    @property
+    def default(self):
+        """Returns the default value for the option"""
+        return self._default
+
+    def __repr__(self):
+        _d = dict(i=self.option_id,
+                  n=self.name,
+                  v=self.default,
+                  k=self.__class__.__name__,
+                  t=self.OPTION_TYPE_ID)
+        return "<{k} {i} name: {n} default: {v} type:{t} >".format(**_d)
+
+    def to_dict(self):
+        option_type = TaskOptionTypes.from_str(self.OPTION_TYPE_ID)
+        # the same model is used in the pipeline template, so we break the
+        # snake case in favor of camelcase for the option type id.
+        return dict(id=self.option_id,
+                    name=self.name,
+                    default=self.default,
+                    description=self.description,
+                    optionTypeId=option_type)
+
+
+def _type_error_msg(value, expected_type):
+    return "{v} Expected {t}, got {x}".format(v=value, t=expected_type, x=type(value))
+
+
+def _strict_validate_int_or_raise(value):
+
+    def _to_msg(type_):
+        return _type_error_msg(value, type_)
+
+    if isinstance(value, types.BooleanType):
+        raise TypeError(_to_msg(types.BooleanType))
+    elif isinstance(value, types.FloatType):
+        raise TypeError(_to_msg(types.FloatType))
+    elif isinstance(value, types.StringType):
+        raise TypeError(_to_msg(types.StringType))
+    else:
+        return int(value)
+
+
+def _strict_validate_bool_or_raise(value):
+    if isinstance(value, types.BooleanType):
+        return value
+    raise TypeError(_type_error_msg(value, types.BooleanType))
+
+
+def _strict_validate_float_or_raise(value):
+
+    def _to_msg(type_):
+        return _type_error_msg(value, type_)
+
+    if isinstance(value, types.BooleanType):
+        raise TypeError(_to_msg(types.BooleanType))
+    elif isinstance(value, types.StringType):
+        raise TypeError(_to_msg(types.StringType))
+    else:
+        return float(value)
+
+
+def _strict_validate_string_or_raise(value):
+    # Not supporting unicode in any way
+    if isinstance(value, str):
+        return value
+    raise TypeError(_type_error_msg(value, str))
+
+
+class PacBioIntOption(BasePacBioOption):
+    OPTION_TYPE_ID = TaskOptionTypes.INT
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_int_or_raise(value)
+
+
+class PacBioFloatOption(BasePacBioOption):
+    OPTION_TYPE_ID = TaskOptionTypes.FLOAT
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_float_or_raise(value)
+
+
+class PacBioBooleanOption(BasePacBioOption):
+    OPTION_TYPE_ID = TaskOptionTypes.BOOL
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_bool_or_raise(value)
+
+
+class PacBioStringOption(BasePacBioOption):
+    OPTION_TYPE_ID = TaskOptionTypes.STR
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_string_or_raise(value)
+
+
+def _strict_validate_default_and_choices(core_type_validator_func):
+    """
+
+    :param core_type_validator_func: Function (value) => value or raises TypeError
+
+    Returns a func of (value, choices) => value, choices or raises TypeError
+    or Value Error.
+    """
+    def wrap(value, choices):
+        for choice in choices:
+            core_type_validator_func(choice)
+        v = core_type_validator_func(value)
+        if v not in choices:
+            raise ValueError("Default value {v} is not in allowed choices {c}".format(v=value, c=choices))
+        return v, choices
+    return wrap
+
+_strict_validate_int_choices = _strict_validate_default_and_choices(_strict_validate_int_or_raise)
+_strict_validate_str_choices = _strict_validate_default_and_choices(_strict_validate_string_or_raise)
+_strict_validate_bool_choices = _strict_validate_default_and_choices(_strict_validate_bool_or_raise)
+_strict_validate_float_choices = _strict_validate_default_and_choices(_strict_validate_float_or_raise)
+
+
+class BaseChoiceType(BasePacBioOption):
+
+    # This really should be Abstract
+    def __init__(self, option_id, name, default, description, choices):
+        super(BaseChoiceType, self).__init__(option_id, name, default, description)
+        _, validated_choices = self.validate_core_type_with_choices(default, choices)
+        self.choices = validated_choices
+
+    @classmethod
+    def validate_core_type_with_choices(cls, value, choices):
+        raise NotImplementedError
+
+    def validate_option(self, value):
+        v, _ = self.validate_core_type_with_choices(value, self.choices)
+        return v
+
+    def to_dict(self):
+        d = super(BaseChoiceType, self).to_dict()
+        d['choices'] = self.choices
+        return d
+
+
+class PacBioIntChoiceOption(BaseChoiceType):
+    OPTION_TYPE_ID = TaskOptionTypes.CHOICE_INT
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_int_or_raise(value)
+
+    @classmethod
+    def validate_core_type_with_choices(cls, value, choices):
+        return _strict_validate_int_choices(value, choices)
+
+
+class PacBioStringChoiceOption(BaseChoiceType):
+    OPTION_TYPE_ID = TaskOptionTypes.CHOICE_STR
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_string_or_raise(value)
+
+    @classmethod
+    def validate_core_type_with_choices(cls, value, choices):
+        return _strict_validate_str_choices(value, choices)
+
+
+class PacBioFloatChoiceOption(BaseChoiceType):
+    OPTION_TYPE_ID = TaskOptionTypes.CHOICE_FLOAT
+
+    @classmethod
+    def validate_core_type(cls, value):
+        return _strict_validate_float_or_raise(value)
+
+    @classmethod
+    def validate_core_type_with_choices(cls, value, choices):
+        return _strict_validate_float_choices(value, choices)
\ No newline at end of file
diff --git a/pbcommand/models/conditions.py b/pbcommand/models/conditions.py
new file mode 100644
index 0000000..e3aa1c5
--- /dev/null
+++ b/pbcommand/models/conditions.py
@@ -0,0 +1,32 @@
+"""Working doc for Condition data models
+
+https://gist.github.com/mpkocher/347f9ae9092c24888e1c702a916276c2
+
+"""
+from collections import namedtuple
+
+
+class ReseqCondition(namedtuple("ReseqCondition", "cond_id subreadset alignmentset referenceset")):
+    def to_dict(self):
+        return {"condId": self.cond_id,
+                "subreadset": self.subreadset,
+                "alignmentset": self.alignmentset,
+                "referenceset": self.referenceset}
+
+    @staticmethod
+    def from_dict(d):
+        def _f(k):
+            # sloppy
+            return d[k].encode('ascii', 'ignore')
+
+        return ReseqCondition(_f('condId'), _f('subreadset'), _f('alignmentset'), _f('referenceset'))
+
+
+class ReseqConditions(namedtuple("ReseqConditions", "conditions")):
+    # leave out the pipeline id. Not sure if this is necessary
+    def to_dict(self):
+        return {"conditions": [c.to_dict() for c in self.conditions]}
+
+    @staticmethod
+    def from_dict(d):
+        return ReseqConditions([ReseqCondition.from_dict(x) for x in d['conditions']])
diff --git a/pbcommand/models/parser.py b/pbcommand/models/parser.py
index ceec823..42baafe 100644
--- a/pbcommand/models/parser.py
+++ b/pbcommand/models/parser.py
@@ -7,11 +7,11 @@ import os
 import logging
 import argparse
 import functools
-import re
 
-import jsonschema
-
-from .common import SymbolTypes
+from .common import (SymbolTypes, validate_task_id,
+                     PacBioFloatOption, PacBioStringOption,
+                     PacBioBooleanOption, PacBioIntOption,
+                     PacBioStringChoiceOption, PacBioFloatChoiceOption, PacBioIntChoiceOption)
 from pbcommand.common_options import (add_base_options_with_emit_tool_contract,
                                       add_subcomponent_versions_option)
 from .tool_contract import (ToolDriver,
@@ -30,30 +30,11 @@ __all__ = ["PbParser",
            "get_scatter_pbparser",
            "get_gather_pbparser"]
 
-RX_TASK_ID = re.compile(r'^([A-z0-9_]*)\.tasks\.([A-z0-9_]*)$')
-RX_TASK_OPTION_ID = re.compile(r'^([A-z0-9_]*)\.task_options\.([A-z0-9_\.]*)')
-
 
 def _to_file_type(format_):
     return "pacbio.file_types.{x}".format(x=format_)
 
 
-class JsonSchemaTypes(object):
-    # array is a native type, but not supported
-    BOOL = "boolean"
-    INT = "integer"
-    NUM = "number"
-    STR = "string"
-    NULL = "null"
-    OBJ = "object"
-
-    # Optional values e.g., Option[String]
-    OPT_BOOL = [BOOL, NULL]
-    OPT_INT = [INT, NULL]
-    OPT_STR = [STR, NULL]
-    OPT_NUM = [NUM, NULL]
-
-
 def _validate_file(label, path):
     if os.path.exists(path):
         return os.path.abspath(path)
@@ -75,6 +56,10 @@ def _validate_option_or_cast(dtype, dvalue):
                         "expected".format(a=dvalue, e=dtype))
 
 
+def to_opt_id(namespace, s):
+    return ".".join([namespace, "options", s])
+
+
 def _validate_option(dtype, dvalue):
     if isinstance(dvalue, dtype):
         return dvalue
@@ -83,89 +68,12 @@ def _validate_option(dtype, dvalue):
                         "expected".format(a=dvalue, e=dtype))
 
 
-def _validate_id(prog, idtype, tid):
-    if prog.match(tid):
-        return tid
-    else:
-        raise ValueError("Invalid format {t}: '{i}' {p}".format(t=idtype, i=tid, p=repr(prog.pattern)))
-
-_validate_task_id = functools.partial(_validate_id, RX_TASK_ID, 'task id')
-_validate_task_option_id = functools.partial(_validate_id, RX_TASK_OPTION_ID,
-                                             'task option id')
-
-
-def to_opt_id(namespace, s):
-    return ".".join([namespace, "options", s])
-
-
-def validate_value(schema, v):
-    return jsonschema.validate(v, schema)
-
-
-def is_valid(schema, v):
-    """Returns a bool if the schema is valid"""
-    try:
-        validate_value(schema, v)
-        return True
-    except jsonschema.ValidationError:
-        pass
-    return False
-
-
-def validate_schema(f):
-    """Deco for validate the returned jsonschema against Draft 4 of the spec"""
-    def w(*args, **kwargs):
-        schema = f(*args, **kwargs)
-        _ = jsonschema.Draft4Validator(schema)
-        return schema
-    return w
-
-
-def to_option_schema(option_id, dtype_or_dtypes, display_name, description, default_value):
-    """
-    Simple util factory method
-    :param dtype_or_dtypes: single data type or list of data types
-    :param option_id: globally unique task option id. Must begin with
-    'pbsmrtpipe.task_options.'
-    :param display_name: display name of task options
-    :param description: Short description of the task options
-    :param required: Is the option required.
-    """
-    # annoying that you can't specify a tuple
-    if isinstance(dtype_or_dtypes, tuple):
-        dtype_or_dtypes = list(dtype_or_dtypes)
-
-    _validate_task_option_id(option_id)
-
-    # Steps toward moving away from JSON schema as the format, but reuse
-    # the jsonschema defined types. Only non-union types are supported.
-    pbd = {"option_id": option_id,
-           "type": dtype_or_dtypes,
-           "default": default_value,
-           "name": display_name,
-           "description": description}
-
-    d = {'$schema': "http://json-schema.org/draft-04/schema#",
-         'type': 'object',
-         'title': "JSON Schema for {o}".format(o=option_id),
-         'properties': {option_id: {'description': description,
-                                    'title': display_name,
-                                    'type': dtype_or_dtypes},
-                        },
-         "pb_option": pbd
-         }
-
-    d['required'] = [option_id]
-    d['properties'][option_id]['default'] = default_value
-    return d
-
-
 class PbParserBase(object):
 
     __metaclass__ = abc.ABCMeta
 
     def __init__(self, tool_id, version, name, description):
-        self.tool_id = _validate_task_id(tool_id)
+        self.tool_id = validate_task_id(tool_id)
         self.version = version
         self.description = description
         self.name = name
@@ -268,6 +176,54 @@ class PbParserBase(object):
         """
         raise NotImplementedError
 
+    @abc.abstractmethod
+    def add_choice_str(self, option_id, option_str, choices, name, description, default=None):
+        """
+        Add a generic enumerated argument whose type is a string.
+
+        :param option_id: fully-qualified option name used in tool contract
+                          layer, of form "pbcommand.task_options.my_option"
+        :param option_str: shorter parameter name, mainly used in Python
+                           argparse layer, but *without* leading dashes
+        :param choices: allowed values
+        :param name: plain-English name
+        :param description: help string
+        :param default: default value (if None, will use first choice)
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def add_choice_int(self, option_id, option_str, choices, name, description, default=None):
+        """
+        Add a generic enumerated argument whose type is an integer.
+
+        :param option_id: fully-qualified option name used in tool contract
+                          layer, of form "pbcommand.task_options.my_option"
+        :param option_str: shorter parameter name, mainly used in Python
+                           argparse layer, but *without* leading dashes
+        :param choices: allowed values
+        :param name: plain-English name
+        :param description: help string
+        :param default: default value (if None, will use first choice)
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def add_choice_float(self, option_id, option_str, choices, name, description, default=None):
+        """
+        Add a generic enumerated argument whose type is a float.
+
+        :param option_id: fully-qualified option name used in tool contract
+                          layer, of form "pbcommand.task_options.my_option"
+        :param option_str: shorter parameter name, mainly used in Python
+                           argparse layer, but *without* leading dashes
+        :param choices: allowed values
+        :param name: plain-English name
+        :param description: help string
+        :param default: default value (if None, will use first choice)
+        """
+        raise NotImplementedError
+
 _validate_argparse_int = functools.partial(_validate_option_or_cast, int)
 _validate_argparse_float = functools.partial(_validate_option_or_cast, float)
 _validate_argparse_bool = functools.partial(_validate_option_or_cast, bool)
@@ -335,6 +291,27 @@ class PyParser(PbParserBase):
         self.parser.add_argument(opt, action=d[_validate_argparse_bool(not default)],
                                  help=description)
 
+    def _add_choice_base(self, opt_type, option_id, option_str, choices, name,
+                         description, default=None):
+        if default is None:
+            default = choices[0]
+        opt = '--' + option_str
+        self.parser.add_argument(opt, action="store", choices=choices,
+                                 type=opt_type,
+                                 help=description, default=default)
+
+    def add_choice_str(self, option_id, option_str, choices, name, description,
+                       default=None):
+        return self._add_choice_base(str, option_id, option_str, choices, name, description, default)
+
+    def add_choice_int(self, option_id, option_str, choices, name, description,
+                       default=None):
+        return self._add_choice_base(int, option_id, option_str, choices, name, description, default)
+
+    def add_choice_float(self, option_id, option_str, choices, name, description,
+                         default=None):
+        return self._add_choice_base(float, option_id, option_str, choices, name, description, default)
+
 
 class ToolContractParser(PbParserBase):
     """Parser to support Emitting and running ToolContracts"""
@@ -346,6 +323,7 @@ class ToolContractParser(PbParserBase):
         super(ToolContractParser, self).__init__(tool_id, version, name, description)
         self.input_types = []
         self.output_types = []
+        # List of PacBioOption and subclasses of PacBioOption
         self.options = []
         self.driver = driver
         self.name = name
@@ -362,26 +340,39 @@ class ToolContractParser(PbParserBase):
         self.output_types.append(x)
 
     def add_int(self, option_id, option_str, default, name, description):
-        self.options.append(to_option_schema(option_id,
-                                             JsonSchemaTypes.INT, name, description,
-                                             _validate_option(int, default)))
+        self.options.append(PacBioIntOption(option_id, name, default, description))
 
     def add_float(self, option_id, option_str, default, name, description):
         if isinstance(default, int):
             default = float(default)
-        self.options.append(to_option_schema(option_id,
-                                             JsonSchemaTypes.NUM, name, description,
-                                             _validate_option(float, default)))
+        self.options.append(PacBioFloatOption(option_id, name, default, description))
 
     def add_str(self, option_id, option_str, default, name, description):
-        self.options.append(to_option_schema(option_id,
-                                             JsonSchemaTypes.STR, name, description,
-                                             _validate_option(str, default)))
+        self.options.append(PacBioStringOption(option_id, name, default, description))
 
     def add_boolean(self, option_id, option_str, default, name, description):
-        self.options.append(to_option_schema(option_id,
-                                             JsonSchemaTypes.BOOL, name, description,
-                                             _validate_option(bool, default)))
+        self.options.append(PacBioBooleanOption(option_id, name, default, description))
+
+    def add_choice_str(self, option_id, option_str, choices, name, description,
+                       default=None):
+        if default is None:
+            default = choices[0]
+
+        self.options.append(PacBioStringChoiceOption(option_id, name, default, description, choices))
+
+    def add_choice_int(self, option_id, option_str, choices, name, description,
+                       default=None):
+        if default is None:
+            default = choices[0]
+
+        self.options.append(PacBioIntChoiceOption(option_id, name, default, description, choices))
+
+    def add_choice_float(self, option_id, option_str, choices, name, description,
+                         default=None):
+        if default is None:
+            default = choices[0]
+
+        self.options.append(PacBioFloatChoiceOption(option_id, name, default, description, choices))
 
     def to_tool_contract(self):
         # Not a well formed tool contract, must have at least one input and
@@ -518,6 +509,21 @@ class PbParser(PbParserBase):
         args = option_id, option_str, default, name, description
         self._dispatch("add_boolean", args, {})
 
+    def add_choice_str(self, option_id, option_str, choices, name, description,
+                       default=None):
+        args = option_id, option_str, choices, name, description, default
+        self._dispatch("add_choice_str", args, {})
+
+    def add_choice_int(self, option_id, option_str, choices, name, description,
+                       default=None):
+        args = option_id, option_str, choices, name, description, default
+        self._dispatch("add_choice_int", args, {})
+
+    def add_choice_float(self, option_id, option_str, choices, name, description,
+                         default=None):
+        args = option_id, option_str, choices, name, description, default
+        self._dispatch("add_choice_float", args, {})
+
     def to_contract(self):
         return self.tool_contract_parser.to_tool_contract()
 
diff --git a/pbcommand/models/report.py b/pbcommand/models/report.py
index 5c9105c..bddf4a8 100644
--- a/pbcommand/models/report.py
+++ b/pbcommand/models/report.py
@@ -11,10 +11,12 @@ import logging
 import json
 import os
 import re
+import uuid as U  # to allow use of uuid as local var
 from pprint import pformat
+import datetime
+
+import pbcommand
 
-# make this optional. This is only for serialization
-import numpy as np
 
 log = logging.getLogger(__name__)
 
@@ -26,7 +28,9 @@ __all__ = ['PbReportError',
            'Column',
            'Table']
 
-import pbcommand
+# If/when the Report datamodel change, this needs to be changed using
+# the semver model
+PB_REPORT_SCHEMA_VERSION = "1.0.0"
 
 _HAS_NUMPY = False
 
@@ -68,9 +72,9 @@ def _get_decoder():
 def _to_json_with_decoder(d):
     decoder_or_none = _get_decoder()
     if decoder_or_none is None:
-        return json.dumps(d, sort_keys=True, indent=4)
+        return json.dumps(d, sort_keys=True, indent=4, separators=(',', ': '))
     else:
-        return json.dumps(d, cls=decoder_or_none, sort_keys=True, indent=4)
+        return json.dumps(d, cls=decoder_or_none, sort_keys=True, indent=4, separators=(',', ': '))
 
 
 class PbReportError(Exception):
@@ -82,10 +86,12 @@ class BaseReportElement(object):
 
     def __init__(self, id_):
         if not isinstance(id_, basestring):
-            raise PbReportError("Type error. id '{i}' cannot be {t}.".format(i=id_, t=type(id_)))
+            raise PbReportError(
+                "Type error. id '{i}' cannot be {t}.".format(i=id_, t=type(id_)))
 
         if not re.match('^[a-z0-9_]+$', id_):
-            msg = "id '{i}' for {x} must contain only lower-case alphanumeric or underscore characters".format(x=self.__class__.__name__, i=id_)
+            msg = "id '{i}' for {x} must contain only lower-case alphanumeric or underscore characters".format(
+                x=self.__class__.__name__, i=id_)
             log.error(msg)
             raise PbReportError(msg)
 
@@ -99,7 +105,8 @@ class BaseReportElement(object):
         :param id_: (int) id of child BaseReportElement
         """
         if id_ in self._ids:
-            msg = "a plot with id '{i}' has already been added to {t}.".format(i=id_, t=str(type(self)))
+            msg = "a plot with id '{i}' has already been added to {t}.".format(
+                i=id_, t=str(type(self)))
             log.error(msg)
             raise PbReportError(msg)
         self._ids.add(id_)
@@ -146,14 +153,6 @@ class BaseReportElement(object):
 
         d = {a: getattr(self, a) for a in self._get_attrs_simple()}
 
-        # Versioning
-        # import pbreports
-        # version = pbreports.get_version()
-        # changelist = pbreports.get_changelist()
-
-        # d['_version'] = version
-        # d['_changelist'] = changelist
-
         d['id'] = '.'.join([str(v) for v in id_parts])
         complex_attrs = self._get_attrs_complex_list()
 
@@ -187,8 +186,6 @@ class Attribute(BaseReportElement):
         :param name: (str, None) optional display name. Can be changed in portal display rules
         """
         BaseReportElement.__init__(self, id_)
-        if value is None:
-            raise PbReportError("value cannot be None. {n} given.".format(n=value))
         self._value = value
         self._name = name
 
@@ -279,12 +276,21 @@ class PlotGroup(BaseReportElement):
     def _get_attrs_complex_list(self):
         return ['plots']
 
+    def get_plot_by_id(self, id_):
+
+        for plot in self.plots:
+            if plot.id == id_:
+                return plot
+
+        return None
+
     def add_plot(self, plot):
         """
         Add a plot to the plotGroup
         """
         if not isinstance(plot, Plot):
-            raise TypeError("Unable to add plot. Got type {x} expect Plot".format(x=type(plot)))
+            raise TypeError(
+                "Unable to add plot. Got type {x} expect Plot".format(x=type(plot)))
         BaseReportElement.is_unique(self, plot.id)
         self._plots.append(plot)
 
@@ -303,12 +309,13 @@ class Plot(BaseReportElement):
     A plot contains a path to image file.
     """
 
-    def __init__(self, id_, image, caption=None, thumbnail=None):
+    def __init__(self, id_, image, caption=None, thumbnail=None, title=None):
         """
         :param id_: (str, not None, or empty) Unique id for plot.
         :param image: (str) Required - not None - path to image
         :param caption: (str, None) Plot caption displayed to user under plot.
         :param thumbnail: (str, None) thumbnail path
+        :param title: str Display Name of the Plot
 
         Paths must be given as relative
         """
@@ -320,6 +327,7 @@ class Plot(BaseReportElement):
 
         self._image = image
         self._caption = caption
+        self.title = title
         if thumbnail is not None:
             _validate_not_abs_path(thumbnail)
 
@@ -344,7 +352,7 @@ class Plot(BaseReportElement):
         return self._caption
 
     def _get_attrs_simple(self):
-        return ['image', 'caption']
+        return ['image', 'caption', 'title']
 
     def _get_attrs_complex_list(self):
         return []
@@ -381,14 +389,20 @@ class Table(BaseReportElement):
         max_values = max(len(column.values) for column in self.columns)
         # max length for each column value
         max_lengths = {}
+        headers = []
         for c in self.columns:
+            this_header = ""
+            if c.header is not None:
+                this_header = c.header
             if c.values:
-                n = max(max(len(str(v)) for v in c.values), len(c.header))
+                n = max(max(len(str(v)) for v in c.values), len(this_header))
             else:
-                n = len(c.header)
+                n=len(this_header)
             max_lengths[c] = n
+            headers.append(this_header)
 
-        header = "".join([c.header.ljust(max_lengths[c] + pad) for c in self.columns])
+        header="".join([h.ljust(max_lengths[c] + pad)
+                          for h in headers])
 
         outs = list()
         outs.append("")
@@ -433,6 +447,14 @@ class Table(BaseReportElement):
     def _get_attrs_complex_list(self):
         return ['columns']
 
+    def get_column_by_id(self, id_):
+
+        for col in self.columns:
+            if col.id == id_:
+                return col
+
+        return None
+
     def add_column(self, column):
         """
         Add a column to the table
@@ -440,7 +462,8 @@ class Table(BaseReportElement):
         :param column: (Column instance)
         """
         if not isinstance(column, Column):
-            raise TypeError("Got type {x}. Expected Column type.".format(x=type(column)))
+            raise TypeError(
+                "Got type {x}. Expected Column type.".format(x=type(column)))
 
         BaseReportElement.is_unique(self, column.id)
         self._columns.append(column)
@@ -457,7 +480,8 @@ class Table(BaseReportElement):
         if column_index < len(self._columns):
             self._columns[column_index].values.append(item)
         else:
-            raise IndexError("Unable to find index {i} in columns.".format(i=column_index))
+            raise IndexError(
+                "Unable to find index {i} in columns.".format(i=column_index))
 
     def add_data_by_column_id(self, column_id, value):
         """Add a value to column.
@@ -472,7 +496,8 @@ class Table(BaseReportElement):
                 if column_id == column.id:
                     column.values.append(value)
         else:
-            raise KeyError("Unable to Column with id '{i}' to assign value {v}".format(i=column_id, v=value))
+            raise KeyError("Unable to Column with id '{i}' to assign value {v}".format(
+                i=column_id, v=value))
 
     @staticmethod
     def merge(tables):
@@ -560,7 +585,7 @@ class Report(BaseReportElement):
     It can be serialized to json.
     """
 
-    def __init__(self, id_, title=None, tables=(), attributes=(), plotgroups=(), dataset_uuids=()):
+    def __init__(self, id_, title=None, tables=(), attributes=(), plotgroups=(), dataset_uuids=(), uuid=None):
         """
         :param id_: (str) Should be a string that identifies the report, like 'adapter'.
         :param title: Display name of report Defaults to the Report+id if None (added in 0.3.9)
@@ -568,12 +593,16 @@ class Report(BaseReportElement):
         :param attributes: (list of attribute instances)
         :param plotgroups: (list of plot group instances)
         :param dataset_uuids: list[string] DataSet uuids of files used to generate the report
+        :param uuid: the unique identifier for the Report
         """
         BaseReportElement.__init__(self, id_)
         self._attributes = []
         self._plotgroups = []
         self._tables = []
         self.title = "Report {i}".format(i=self.id) if title is None else title
+        # FIXME(mkocher)(2016-3-30) Add validation to make sure it's a well formed value
+        # this needs to be required
+        self.uuid = uuid if uuid is not None else str(U.uuid4())
 
         if tables:
             for table in tables:
@@ -588,12 +617,17 @@ class Report(BaseReportElement):
         # Datasets that
         self._dataset_uuids = dataset_uuids
 
+    @property
+    def dataset_uuids(self):
+        return self._dataset_uuids
+
     def add_attribute(self, attribute):
         """Add an attribute to the report
         :param attribute: (Attribute instance)
         """
         if not isinstance(attribute, Attribute):
-            TypeError("Got type {x}. Expected Attribute type.".format(x=type(attribute)))
+            TypeError("Got type {x}. Expected Attribute type.".format(
+                x=type(attribute)))
 
         BaseReportElement.is_unique(self, attribute.id)
         self._attributes.append(attribute)
@@ -603,7 +637,8 @@ class Report(BaseReportElement):
         Add a plotgroup to the report
         """
         if not isinstance(plotgroup, PlotGroup):
-            TypeError("Got type {x}. Expected Attribute type.".format(x=type(plotgroup)))
+            TypeError("Got type {x}. Expected Attribute type.".format(
+                x=type(plotgroup)))
 
         BaseReportElement.is_unique(self, plotgroup.id)
         self._plotgroups.append(plotgroup)
@@ -621,8 +656,8 @@ class Report(BaseReportElement):
                   n=self.title,
                   a=len(self.attributes),
                   p=len(self.plotGroups),
-                  t=len(self.tables))
-        return "<{k} id:{i} title:{n} nattributes:{a} nplot_groups:{p} ntables:{t} >".format(**_d)
+                  t=len(self.tables), u=self.uuid)
+        return "<{k} id:{i} title:{n} uuid:{u} nattributes:{a} nplot_groups:{p} ntables:{t} >".format(**_d)
 
     @property
     def attributes(self):
@@ -659,28 +694,55 @@ class Report(BaseReportElement):
 
         return None
 
+    def get_table_by_id(self, id_):
+
+        for table in self.tables:
+            if table.id == id_:
+                return table
+
+        return None
+
+    def get_plotgroup_by_id(self, id_):
+
+        for pg in self.plotGroups:
+            if pg.id == id_:
+                return pg
+
+        return None
+
     def to_dict(self, id_parts=None):
-        version = pbcommand.get_version()
+
+        _d = dict(v=pbcommand.get_version(),
+                  t=datetime.datetime.now().isoformat())
 
         d = BaseReportElement.to_dict(self, id_parts=id_parts)
+        d['_comment'] = "Generated with pbcommand version {v} at {t}".format(**_d)
+
+        # Required in 1.0.0 of the spec
+        d['uuid'] = self.uuid
         d['title'] = self.title
-        d['_version'] = version
-        d['_changelist'] = "UNKNOWN"
-        d['dataset_uuids'] = list(set(self._dataset_uuids))
+        d['version'] = PB_REPORT_SCHEMA_VERSION
+        d['dataset_uuids'] = list(set(self.dataset_uuids))
         return d
 
     def to_json(self):
         """Return a json string of the report"""
+
+        from pbcommand.schemas import validate_pbreport
+
         try:
             s = _to_json_with_decoder(self.to_dict())
+            # FIXME(mkocher)(2016-6-20) Enable schema validation
+            # this needs to be processed by the decoder, then validate the
+            # dict
+            # _ = validate_pbreport(json.loads(s))
+            return s
         except TypeError as e:
             msg = "Unable to serialize report due to {e} \n".format(e=e)
             log.error(msg)
             log.error("Object: " + pformat(self.to_dict()))
             raise
 
-        return s
-
     def write_json(self, file_name):
         """
         Serialized the report to a json file.
@@ -765,7 +827,7 @@ class Report(BaseReportElement):
             assert report.id == report_id
             attr_list.append(report.attributes)
             table_list.extend(report.tables)
-            dataset_uuids.update(set(report._dataset_uuids))
+            dataset_uuids.update(set(report.dataset_uuids))
         table = _attributes_to_table(attr_list, 'chunk_metrics',
                                      "Chunk Metrics")
         tables = _merge_tables(table_list)
@@ -773,3 +835,323 @@ class Report(BaseReportElement):
         merged_attributes = _sum_attributes(attr_list)
         return Report(report_id, attributes=merged_attributes, tables=tables,
                       dataset_uuids=sorted(list(dataset_uuids)))
+
+
+########################################################################
+# SPECIFICATION MODELS
+
+FS_RE = "{([GMkp]{0,1})(:)([,]{0,1})([\.]{0,1})([0-9]*)([dfg]{1})}(.*)$"
+def validate_format(format_str):
+    m = re.match(FS_RE, format_str)
+    if m is None:
+         raise ValueError("Format string '{s}' is uninterpretable".format(
+                          s=format_str))
+    return m
+
+def format_metric(format_str, value):
+    """
+    Format a report metric (attribute or table column value) according to our
+    in-house rules.  These resemble Python format strings (plus optional
+    suffix), but with the addition of optional scaling flags.
+    """
+    if value is None:
+        return "NA"
+    elif format_str is None:
+        return str(value)
+    else:
+        m = validate_format(format_str)
+        if m.groups()[0] == 'p':
+            value *= 100.0
+        elif m.groups()[0] == 'G':
+            value /= 1000000000.0
+        elif m.groups()[0] == 'M':
+            value /= 1000000.0
+        elif m.groups()[0] == 'k':
+            value /= 1000.0
+        if isinstance(value, float) and m.groups()[5] == 'd':
+            value = int(value)
+        fs_python = "{{:{:s}{:s}{:s}{:s}}}".format(*(m.groups()[2:6]))
+        formatted = fs_python.format(value)
+        # the percent symbol can be implicit
+        if m.groups()[0] == 'p' and m.groups()[-1] == '':
+            return formatted + "%"
+        else:
+            return formatted + m.groups()[-1]
+
+
+# FIXME this needs to be standardized
+DATA_TYPES = {
+    "int": int,
+    "long": int,
+    "float": float,
+    "string": basestring, # this is hacky too
+    "boolean": bool
+}
+
+class AttributeSpec(object):
+
+    def __init__(self, id_, name, description, type_, format_=None,
+                 is_hidden=False):
+        self.id = id_
+        self.name = name
+        self.description = description
+        self._type = type_
+        self.format_str = format_
+        self.is_hidden = is_hidden
+
+    @property
+    def type(self):
+        return DATA_TYPES[self._type]
+
+    @staticmethod
+    def from_dict(d):
+        format_str = d.get("format", None)
+        if format_str is not None:
+            validate_format(format_str)
+        assert d["type"] in DATA_TYPES, d["type"]
+        return AttributeSpec(d['id'].split(".")[-1], d['name'],
+                             d['description'], d["type"], format_str,
+                             d.get("isHidden", False))
+
+    def validate_attribute(self, attr):
+        assert attr.id == self.id
+        if attr.value is not None and not isinstance(attr.value, self.type):
+            msg = "Attribute {i} has value of type {v} (expected {t})".format(i=self.id, v=type(attr.value).__name__, t=self.type)
+            raise TypeError(msg)
+
+
+class ColumnSpec(object):
+    def __init__(self, id_, header, description, type_, format_=None,
+                 is_hidden=False):
+        self.id = id_
+        self.header = header
+        self.description = description
+        self._type = type_
+        self.format_str = format_  # was the builtin 'format'; store the format_ arg like AttributeSpec does
+        self.is_hidden = is_hidden
+
+    @property
+    def type(self):
+        return DATA_TYPES[self._type]
+
+    @staticmethod
+    def from_dict(d):
+        format_str = d.get("format", None)
+        if format_str is not None:
+            validate_format(format_str)
+        assert d["type"] in DATA_TYPES, d["type"]
+        return ColumnSpec(d['id'].split(".")[-1], d['header'],
+                          d['description'], d["type"], format_str,
+                          d.get("isHidden", False))
+
+    def validate_column(self, col):
+        assert col.id == self.id
+        for value in col.values:
+            if value is not None and not isinstance(value, self.type):
+                msg = "Column {i} contains value of type {v} (expected {t})".format(i=self.id, v=type(value).__name__, t=self.type)
+                if isinstance(value, int) and self._type == "float":
+                    warnings.warn(msg)  # NOTE(review): 'warnings' is not imported in this patch's import hunk -- verify
+                else:
+                    raise TypeError(msg)
+
+
+class TableSpec(object):
+
+    def __init__(self, id_, title, description, columns):
+        self.id = id_
+        self.title = title
+        self.description = description
+        self.columns = columns
+        self._col_dict = {c.id: c for c in columns}
+
+    @staticmethod
+    def from_dict(d):
+        return TableSpec(d['id'].split(".")[-1], d['title'], d['description'],
+                         [ColumnSpec.from_dict(c) for c in d['columns']])
+
+    def get_column_spec(self, id_):
+        return self._col_dict.get(id_, None)
+
+
+class PlotSpec(object):
+
+    def __init__(self, id_, description, caption, title, xlabel, ylabel):
+        self.id = id_
+        self.description = description
+        self.caption = caption
+        self.title = title
+        self.xlabel = xlabel
+        self.ylabel = ylabel
+
+    @staticmethod
+    def from_dict(d):
+        return PlotSpec(d['id'].split(".")[-1], d['description'],
+                        d['caption'], d['title'],
+                        d.get('xlabel', None), d.get('ylabel', None))
+
+
+class PlotGroupSpec(object):
+
+    def __init__(self, id_, title, description, legend, plots=()):
+        self.id = id_
+        self.title = title
+        self.description = description
+        self.legend = legend
+        self.plots = plots
+        self._plot_dict = {p.id: p for p in plots}
+
+    @staticmethod
+    def from_dict(d):
+        return PlotGroupSpec(d['id'].split(".")[-1], d['title'],
+                             d["description"], d['legend'],
+                             [PlotSpec.from_dict(p) for p in d['plots']])
+
+
+    def get_plot_spec(self, id_):
+        return self._plot_dict.get(id_, None)
+
+
class ReportSpec(object):
    """
    Model for a specification of the expected content of a uniquely
    identified report.  For obvious reasons this mirrors the Report model,
    minus values and with added view metadata.  These specs should usually
    be written out explicitly in JSON rather than built programmatically.
    """

    def __init__(self, id_, version, title, description, attributes=(),
                 plotgroups=(), tables=()):
        """
        :param id_: fully-qualified report ID (kept as-is, unlike the nested
            specs, which shorten IDs to the last dot-field)
        :param version: version string of this spec
        :param title: display title for the report
        :param description: short description of the report
        :param attributes: list of AttributeSpec instances
        :param plotgroups: list of PlotGroupSpec instances
        :param tables: list of TableSpec instances
        """
        self.id = id_
        self.version = version
        self.title = title
        self.description = description
        self.attributes = attributes
        self.plotgroups = plotgroups
        self.tables = tables
        # ID-keyed indices backing the get_*_spec lookups below
        self._attr_dict = {a.id: a for a in attributes}
        self._plotgrp_dict = {p.id: p for p in plotgroups}
        self._table_dict = {t.id: t for t in tables}

    @staticmethod
    def from_dict(d):
        """Build a ReportSpec (with all nested specs) from a decoded JSON dict."""
        return ReportSpec(d['id'], d['version'], d['title'], d['description'],
                          [AttributeSpec.from_dict(a)
                           for a in d['attributes']],
                          [PlotGroupSpec.from_dict(p)
                           for p in d['plotGroups']],
                          [TableSpec.from_dict(t) for t in d['tables']])

    def get_attribute_spec(self, id_):
        # None when the attribute ID is not part of this spec
        return self._attr_dict.get(id_, None)

    def get_plotgroup_spec(self, id_):
        # None when the plot group ID is not part of this spec
        return self._plotgrp_dict.get(id_, None)

    def get_table_spec(self, id_):
        # None when the table ID is not part of this spec
        return self._table_dict.get(id_, None)

    def validate_report(self, rpt):
        """
        Check that a generated report corresponding to this spec is compliant
        with the expected types and object IDs.  (Missing objects will not
        result in an error, but unexpected object IDs will.)

        :param rpt: Report whose ``id`` must equal this spec's ``id``
        :return: the report, unchanged, when validation succeeds
        :raises ValueError: listing every accumulated problem otherwise
        """
        assert rpt.id == self.id
        # TODO check version?
        # Errors are accumulated rather than raised eagerly so the caller
        # sees every problem in a single pass.
        errors = []
        for attr in rpt.attributes:
            attr_spec = self.get_attribute_spec(attr.id)
            if attr_spec is None:
                errors.append("Attribute {i} not found in spec".format(
                              i=attr.id))
            else:
                try:
                    attr_spec.validate_attribute(attr)
                except TypeError as e:
                    errors.append(str(e))
                # Also confirm the value renders with the declared format
                # string
                try:
                    format_metric(attr_spec.format_str, attr.value)
                except (ValueError, TypeError) as e:
                    log.error(e)
                    errors.append("Couldn't format {i}: {e}".format(
                                  i=attr.id, e=str(e)))
        for table in rpt.tables:
            table_spec = self.get_table_spec(table.id)
            if table_spec is None:
                errors.append("Table {i} not found in spec".format(i=table.id))
            else:
                for column in table.columns:
                    column_spec = table_spec.get_column_spec(column.id)
                    if column_spec is None:
                        errors.append("Column {i} not found in spec".format(
                                      i=column.id))
                    else:
                        try:
                            column_spec.validate_column(column)
                        except TypeError as e:
                            errors.append(str(e))
        for pg in rpt.plotGroups:
            pg_spec = self.get_plotgroup_spec(pg.id)
            if pg_spec is None:
                errors.append("Plot group {i} not found in spec".format(
                              i=pg.id))
            else:
                for plot in pg.plots:
                    plot_spec = pg_spec.get_plot_spec(plot.id)
                    # FIXME how should we handle plots with variable IDs?
                    # maybe let the title/caption vary and keep the ID
                    # constant?
                    # Per the FIXME above, unknown plot IDs only warn and are
                    # not treated as validation errors.
                    if plot_spec is None:
                        warnings.warn("Plot {i} not found in spec".format(
                                      i=plot.id))
                    #    errors.append("Plot {i} not found in spec".format(
                    #                  i=plot.id))
        if len(errors) > 0:
            raise ValueError(
                "Report {i} failed validation against spec:\n{e}".format(
                i=self.id, e="\n".join(errors)))
        return rpt

    def is_valid_report(self, rpt):
        """
        Returns True if report passes spec validation.
        """
        try:
            rpt = self.validate_report(rpt)
            return True
        except ValueError:
            return False

    def apply_view(self, rpt, force=False):
        """
        Propagate view metadata (i.e. labels) to a Report object corresponding to
        this spec.

        :param rpt: Report whose ``id`` must equal this spec's ``id``
        :param force: when True, overwrite existing labels; otherwise only
            fill in labels that are missing (None or empty)
        :return: the same report, mutated in place
        """
        assert rpt.id == self.id
        for attr in rpt.attributes:
            attr_spec = self.get_attribute_spec(attr.id)
            # NOTE(review): attr_spec (and table_spec/col_spec below) may be
            # None for IDs absent from the spec, which would raise
            # AttributeError here — presumably callers validate first. TODO
            # confirm.
            if force or attr.name in [None, ""]:
                attr._name = attr_spec.name
        for table in rpt.tables:
            table_spec = self.get_table_spec(table.id)
            if force or table.title in [None, ""]:
                table._title = table_spec.title
            for col in table.columns:
                col_spec = table_spec.get_column_spec(col.id)
                if force or col.header in [None, ""]:
                    col._header = col_spec.header
        for pg in rpt.plotGroups:
            pg_spec = self.get_plotgroup_spec(pg.id)
            if force or pg.title in [None, ""]:
                pg._title = pg_spec.title
            for plot in pg.plots:
                plot_spec = pg_spec.get_plot_spec(plot.id)
                # FIXME see comment above - maybe we just need to repeat IDs?
                if plot_spec is not None:
                    if force or plot.title in [None, ""]:
                        plot.title = plot_spec.title
                    if force or plot.caption in [None, ""]:
                        plot._caption = plot_spec.caption
                else:
                    pass #warnings.warn("Can't find spec for {i}".format(i=plot.id))
        return rpt
diff --git a/pbcommand/models/tool_contract.py b/pbcommand/models/tool_contract.py
index 4929ebd..d82716a 100644
--- a/pbcommand/models/tool_contract.py
+++ b/pbcommand/models/tool_contract.py
@@ -4,6 +4,9 @@
 Author: Michael Kocher
 """
 import abc
+from collections import OrderedDict
+import types
+import datetime
 
 import pbcommand
 
@@ -31,6 +34,18 @@ def _validate_or_raise(value, type_or_types):
     return value
 
 
def _validate_list_of_or_raise(a_list, t):
    """Validates a List of items of a specific type

    :raises TypeError: when *a_list* is not a list/tuple, or when any item
        is not an instance of *t*
    :return: *a_list* unchanged
    """
    if not isinstance(a_list, (list, tuple)):
        raise TypeError("Expected list, got {t}".format(t=type(a_list)))

    for element in a_list:
        if isinstance(element, t):
            continue
        raise TypeError("Expected type {t}, Got {x}".format(t=t, x=type(element)))

    return a_list
+
+
def _is_empty_list(alist):
    # True when the sized sequence has no elements.
    return len(alist) == 0
 
@@ -150,7 +165,7 @@ class ToolContractTask(object):
 
     TASK_TYPE_ID = TaskTypes.STANDARD
 
-    def __init__(self, task_id, name, description, version, is_distributed, input_types, output_types, tool_options, nproc, resources):
+    def __init__(self, task_id, name, description, version, is_distributed, input_types, output_types, options, nproc, resources):
         """
         Core metadata for a commandline task
 
@@ -158,15 +173,18 @@ class ToolContractTask(object):
         :type task_id: str
         :param name: Display name of your
         :param description: Short description of your tool
-        :param version: semantic style versioning
+        :param version: semantic style version string
         :param is_distributed: If the task will be run locally or not
         :param is_distributed: bool
         :param input_types: list[FileType]
         :param output_types:
-        :param tool_options:
+        :param options: list of PacBioOption instances
         :param nproc:
         :param resources:
-        :return:
+
+
+        :type tool_options: list[PacBioOption]
+
         """
         self.task_id = task_id
         self.name = name
@@ -176,8 +194,7 @@ class ToolContractTask(object):
         self.input_file_types = input_types
         self.output_file_types = output_types
         # This needs to be list
-        # self.options = _validate_or_raise(tool_options, (list, tuple))
-        self.options = tool_options
+        self.options = _validate_or_raise(options, types.ListType)
         self.nproc = nproc
         # List of ResourceTypes
         self.resources = resources
@@ -187,8 +204,12 @@ class ToolContractTask(object):
         return "<{k} id:{i} {n} >".format(**_d)
 
     def to_dict(self):
-        # this is a little hack to get around some sloppyness in the datamodel
-        opts = self.options if self.options else []
+        opts = [x.to_dict() for x in self.options]
+
+        # for debugging, but creates too much chatter for production
+        # now = " " + str(datetime.datetime.now())
+        now = ""
+        comment = "Created by pbcommand {v}".format(v=__version__, n=now) + str(now)
 
         _t = dict(tool_contract_id=self.task_id,
                   input_types=[i.to_dict() for i in self.input_file_types],
@@ -200,7 +221,7 @@ class ToolContractTask(object):
                   schema_options=opts,
                   nproc=self.nproc,
                   resource_types=self.resources,
-                  _comment="Created by v{v}".format(v=__version__))
+                  _comment=comment)
         return _t
 
 
@@ -235,7 +256,11 @@ class GatherToolContractTask(ToolContractTask):
 
 class ToolContract(object):
 
-    def __init__(self, task, driver):
+    # Calling to_dict will always generate a compliant version with this
+    # spec
+    WRITER_SCHEMA_VERSION = "2.0.0"
+
+    def __init__(self, task, driver, schema_version=WRITER_SCHEMA_VERSION):
         """
 
         :type task: ToolContractTask | ScatterToolContractTask | GatherToolContractTask
@@ -247,6 +272,7 @@ class ToolContract(object):
         """
         self.task = task
         self.driver = driver
+        self.schema_version = schema_version
 
     def __repr__(self):
         _d = dict(k=self.__class__.__name__, i=self.task.task_id, t=self.task.is_distributed)
@@ -259,7 +285,9 @@ class ToolContract(object):
         _d = dict(version=self.task.version,
                   tool_contract_id=self.task.task_id,
                   driver=self.driver.to_dict(),
-                  tool_contract=_t)
+                  tool_contract=_t,
+                  schema_version=self.WRITER_SCHEMA_VERSION)
+
         return _d
 
 
@@ -374,3 +402,28 @@ class ResolvedToolContract(object):
     def to_dict(self):
         return dict(resolved_tool_contract=self.task.to_dict(),
                     driver=self.driver.to_dict())
+
+
class PipelinePreset(object):
    """Container for a named set of pipeline and task option overrides."""

    def __init__(self, options, task_options, pipeline_id,
                 preset_id, name, description):
        self.options = options
        self.task_options = task_options
        self.pipeline_id = pipeline_id
        self.preset_id = preset_id
        self.name = name
        self.description = description

    def __repr__(self):
        return "<{k} >".format(k=self.__class__.__name__)

    def to_dict(self):
        """Serialize with a stable key order for readable JSON output."""
        d = OrderedDict()
        d["pipelineId"] = self.pipeline_id
        d["presetId"] = self.preset_id
        d["name"] = self.name
        d["description"] = self.description
        d["options"] = dict(self.options)
        d["taskOptions"] = dict(self.task_options)
        return d
diff --git a/pbcommand/pb_io/__init__.py b/pbcommand/pb_io/__init__.py
index e75af62..50fe468 100644
--- a/pbcommand/pb_io/__init__.py
+++ b/pbcommand/pb_io/__init__.py
@@ -1,8 +1,12 @@
-from .report import load_report_from_json
+from .report import load_report_from_json, load_report_spec_from_json
 from .tool_contract_io import (load_tool_contract_from,
                                load_resolved_tool_contract_from,
+                               load_pipeline_presets_from,
                                write_resolved_tool_contract,
                                write_tool_contract,
                                write_resolved_tool_contract_avro,
                                write_tool_contract_avro)
-from .common import load_pipeline_chunks_from_json, write_pipeline_chunks
+from .common import (load_pipeline_chunks_from_json, write_pipeline_chunks,
+                     load_pipeline_datastore_view_rules_from_json,
+                     pacbio_option_from_dict)
+from .conditions import load_reseq_conditions_from
diff --git a/pbcommand/pb_io/common.py b/pbcommand/pb_io/common.py
index 9459c22..f92580b 100644
--- a/pbcommand/pb_io/common.py
+++ b/pbcommand/pb_io/common.py
@@ -1,8 +1,15 @@
 import logging
 import json
 import sys
+import warnings
 
-from pbcommand.models import PipelineChunk
+from pbcommand.models import (PipelineChunk, PipelineDataStoreViewRules,
+                              TaskOptionTypes, PacBioFloatChoiceOption,
+                              PacBioStringChoiceOption,
+                              PacBioIntChoiceOption, PacBioStringOption,
+                              PacBioFloatOption, PacBioBooleanOption,
+                              PacBioIntOption)
+from pbcommand.schemas import validate_datastore_view_rules
 
 log = logging.getLogger(__name__)
 
@@ -16,7 +23,7 @@ def write_pipeline_chunks(chunks, output_json_file, comment):
         _d['_comment'] = comment
 
     with open(output_json_file, 'w') as f:
-        f.write(json.dumps(_d, indent=4))
+        f.write(json.dumps(_d, indent=4, separators=(',', ': ')))
 
     log.debug("Write {n} chunks to {o}".format(n=len(chunks), o=output_json_file))
 
@@ -43,3 +50,105 @@ def load_pipeline_chunks_from_json(path):
         msg = "Unable to load pipeline chunks from {f}".format(f=path)
         sys.stderr.write(msg + "\n")
         raise
+
+
def load_pipeline_datastore_view_rules_from_json(path):
    """Load PipelineDataStoreViewRules from a JSON file.

    The raw dict is schema-validated before the model is constructed.

    :param path: path to the datastore view rules JSON file
    :rtype: PipelineDataStoreViewRules
    """
    with open(path, 'r') as f:
        d = json.loads(f.read())
        validate_datastore_view_rules(d)
        return PipelineDataStoreViewRules.from_dict(d)
+
+
def _pacbio_choice_option_from_dict(d):
    """
    Factory/dispatch method for returning a PacBio Choice Option Type

    :rtype: PacBioOption
    """
    choices = d['choices']
    default_value = d['default']
    # this will immediately raise on an unrecognized choice-type id
    option_type_id = TaskOptionTypes.from_choice_str(d['optionTypeId'])

    dispatch = {TaskOptionTypes.CHOICE_STR: PacBioStringChoiceOption,
                TaskOptionTypes.CHOICE_FLOAT: PacBioFloatChoiceOption,
                TaskOptionTypes.CHOICE_INT: PacBioIntChoiceOption}
    klass = dispatch[option_type_id]

    # Sanitize Unicode hack: string choices are forced to ascii (Python 2)
    if klass is PacBioStringChoiceOption:
        default_value = default_value.encode('ascii', 'ignore')
        choices = [c.encode('ascii', 'ignore') for c in choices]

    return klass(d['id'], d['name'], default_value, d['description'], choices)
+
+
def __simple_option_by_type(option_id, name, default, description, option_type_id):
    """Construct the concrete PacBio*Option for a simple (non-choice) type id."""
    option_type = TaskOptionTypes.from_simple_str(option_type_id)

    dispatch = {TaskOptionTypes.INT: PacBioIntOption,
                TaskOptionTypes.FLOAT: PacBioFloatOption,
                TaskOptionTypes.STR: PacBioStringOption,
                TaskOptionTypes.BOOL: PacBioBooleanOption}
    klass = dispatch[option_type]

    # This requires a hack for the unicode to ascii for string option type.
    if klass is PacBioStringOption:
        # sanitize unicode
        default = default.encode('ascii', 'ignore')

    return klass(option_id, name, default, description)
+
+
def _pacbio_legacy_option_from_dict(d):
    """
    Load the legacy (jsonschema-ish format)

    Note, choice types are not supported here.

    :rtype: PacBioOption
    """
    warnings.warn("This is obsolete and will disappear soon", DeprecationWarning)

    pb_opt = d['pb_option']
    opt_id = pb_opt['option_id']
    name = pb_opt['name']
    default = pb_opt['default']
    desc = pb_opt['description']
    option_type_id = pb_opt['type'].encode('ascii')

    # Hack to support "number"
    if option_type_id == "number":
        option_type_id = "float"

    return __simple_option_by_type(opt_id, name, default, desc, option_type_id)
+
+
def _pacbio_option_from_dict(d):
    """Dispatch between the legacy ('pb_option') and current simple formats."""
    if "pb_option" in d:
        return _pacbio_legacy_option_from_dict(d)
    return __simple_option_by_type(d['id'], d['name'], d['default'],
                                   d['description'], d['optionTypeId'])
+
+
def pacbio_option_from_dict(d):
    """Fundamental API for loading any PacBioOption type from a dict """
    # This should probably be pushed into pbcommand/pb_io/* for consistency
    # Extensions are supported by adding a dispatch method by looking for
    # required key(s) in the dict.
    # The None check covers TCs with non-choice models that were still
    # written with a "choices" key.
    if d.get('choices') is not None:
        return _pacbio_choice_option_from_dict(d)
    return _pacbio_option_from_dict(d)
diff --git a/pbcommand/pb_io/conditions.py b/pbcommand/pb_io/conditions.py
new file mode 100644
index 0000000..a4f13df
--- /dev/null
+++ b/pbcommand/pb_io/conditions.py
@@ -0,0 +1,53 @@
+import json
+import os
+
+from pbcommand.models import ReseqConditions, ReseqCondition
+
+
def _resolve_conditions(cs, path):
    """Return a copy of *cs* with every relative file path anchored at *path*.

    :type cs: ReseqConditions
    :rtype: ReseqConditions
    """

    def _anchor(p):
        # Absolute paths pass through untouched
        return p if os.path.isabs(p) else os.path.join(path, p)

    resolved = [ReseqCondition(c.cond_id,
                               _anchor(c.subreadset),
                               _anchor(c.alignmentset),
                               _anchor(c.referenceset))
                for c in cs.conditions]

    return cs._replace(conditions=resolved)
+
+
def load_reseq_conditions_from(json_file_or_dict):
    """
    Load resequencing conditions from JSON file or str

    :param json_file_or_dict: path to a JSON file, or an already-decoded dict

    :rtype: ReseqConditions
    """
    # refactor that common usage from TC io
    if isinstance(json_file_or_dict, dict):
        d = json_file_or_dict
    else:
        with open(json_file_or_dict, 'r') as f:
            d = json.loads(f.read())

    cs = ReseqConditions.from_dict(d)

    # When loaded from a file path, relative paths within the conditions are
    # resolved against the file's directory; a bare dict has no anchor and is
    # returned as-is.
    if isinstance(json_file_or_dict, basestring):
        base_dir = os.path.dirname(os.path.abspath(json_file_or_dict))
        return _resolve_conditions(cs, base_dir)
    return cs
diff --git a/pbcommand/pb_io/report.py b/pbcommand/pb_io/report.py
index 3037232..3e0c8fc 100644
--- a/pbcommand/pb_io/report.py
+++ b/pbcommand/pb_io/report.py
@@ -4,12 +4,12 @@ This manual marshalling/de-marshalling is not awesome.
 """
 import json
 import logging
+import uuid as U
 
 from pbcommand.models.report import (Report, Plot, PlotGroup, Attribute,
-                                     Table, Column)
+                                     Table, Column, ReportSpec)
+from pbcommand.schemas import validate_report, validate_report_spec
 
-SUPPORTED_VERSIONS = ('2.1', '2.2', '2.3')
-_DEFAULT_VERSION = '2.1'  # before the version was officially added
 
 log = logging.getLogger(__name__)
 
@@ -28,7 +28,8 @@ def _to_plot(d):
     caption = d.get('caption', None)
     image = d['image']
     thumbnail = d.get('thumbnail', None)
-    p = Plot(id_, image, caption=caption, thumbnail=thumbnail)
+    title = d.get('title', None)
+    p = Plot(id_, image, caption=caption, thumbnail=thumbnail, title=title)
     return p
 
 
@@ -80,20 +81,19 @@ def _to_table(d):
 
 
 def dict_to_report(dct):
-    # MK. We'll revisit this at some point.
-    # if '_version' in dct:
-    #     version = dct['_version']
-    #     if version not in SUPPORTED_VERSIONS:
-    #         # should this raise an exception?
-    #         log.warn("{v} is an unsupported version. Supported versions {vs}".format(v=version, vs=SUPPORTED_VERSIONS))
+    # FIXME. Add support for different version schemas in a cleaner, more
+    # concrete manner.
 
     report_id = dct['id']
 
+    # Make this optional for now
+    report_uuid = dct.get('uuid', str(U.uuid4()))
+
+    # Make sure the UUID is well formed
+    _ = U.UUID(report_uuid)
+
     # Legacy Reports > 0.3.9 will not have the title key
-    if 'title' in dct:
-        title = dct['title']
-    else:
-        title = "Report {i}".format(i=report_id)
+    title = dct.get('title', "Report {i}".format(i=report_id))
 
     plot_groups = []
     if 'plotGroups' in dct:
@@ -116,7 +116,8 @@ def dict_to_report(dct):
                     plotgroups=plot_groups,
                     tables=tables,
                     attributes=attributes,
-                    dataset_uuids=dct.get('dataset_uuids', ()))
+                    dataset_uuids=dct.get('dataset_uuids', ()),
+                    uuid=report_uuid)
 
     return report
 
@@ -138,3 +139,11 @@ def _to_report(nfiles, attribute_id, report_id):
 
 def fofn_to_report(nfofns):
     return _to_report(nfofns, "nfofns", "fofn_report")
+
+
def load_report_spec_from_json(json_file, validate=True):
    """Load a ReportSpec from a JSON file, optionally schema-validating first."""
    with open(json_file, 'r') as f:
        d = json.loads(f.read())
    if validate:
        validate_report_spec(d)
    return ReportSpec.from_dict(d)
diff --git a/pbcommand/pb_io/tool_contract_io.py b/pbcommand/pb_io/tool_contract_io.py
index aa497df..ab724e0 100644
--- a/pbcommand/pb_io/tool_contract_io.py
+++ b/pbcommand/pb_io/tool_contract_io.py
@@ -6,14 +6,14 @@ from avro.io import DatumWriter
 
 import pbcommand
 
-from pbcommand.schemas import RTC_SCHEMA, TC_SCHEMA
+from pbcommand.schemas import RTC_SCHEMA, TC_SCHEMA, validate_presets
 from pbcommand.models import (TaskTypes,
                               GatherToolContractTask,
                               ScatterToolContractTask,
                               MalformedToolContractError,
                               MalformedResolvedToolContractError,
                               validate_tool_contract)
-
+from pbcommand.pb_io.common import pacbio_option_from_dict
 from pbcommand.models.tool_contract import (ToolDriver,
                                             ToolContractTask,
                                             ToolContract,
@@ -23,12 +23,14 @@ from pbcommand.models.tool_contract import (ToolDriver,
                                             OutputFileType,
                                             ResolvedScatteredToolContractTask,
                                             ResolvedGatherToolContractTask,
-                                            ToolContractResolvedResource)
+                                            ToolContractResolvedResource,
+                                            PipelinePreset)
 
 log = logging.getLogger(__name__)
 
 __all__ = ['load_resolved_tool_contract_from',
            'load_tool_contract_from',
+           'load_pipeline_presets_from',
            'write_tool_contract',
            'write_resolved_tool_contract']
 
@@ -83,6 +85,9 @@ def __core_resolved_tool_contract_task_from_d(d):
     def _get(attr_name):
         return d[Constants.RTOOL][attr_name]
 
+    def _get_or(attr_name, default_value):
+        return d[Constants.RTOOL].get(attr_name, default_value)
+
     def _get_ascii(x_):
         return _to_a(_get(x_))
 
@@ -97,7 +102,8 @@ def __core_resolved_tool_contract_task_from_d(d):
     tool_options = _get("options")
     # int
     nproc = _get("nproc")
-    log_level = _get("log_level")
+    # allow for backward compatibility
+    log_level = _get_or("log_level", "INFO")
 
     resource_types = [ToolContractResolvedResource.from_d(dx) for dx in _get("resources")]
 
@@ -194,12 +200,17 @@ def load_resolved_tool_contract_from(path_or_d):
 
 @_json_path_or_d
 def __core_tool_contract_task_from(d):
-    def _to_a(x):
-        return x.encode('ascii', 'ignore')
+
+    if Constants.TOOL not in d:
+        raise MalformedResolvedToolContractError("Unable to find root key {k}. Keys {a}".format(k=Constants.TOOL, a=d.keys()))
+
+    def _to_a(x_):
+        return x_.encode('ascii', 'ignore')
 
     def _get(x_):
+        # Get a Subkey within
         if x_ not in d[Constants.TOOL]:
-            raise MalformedToolContractError("Unable to find key '{x}'".format(x=x_))
+            raise MalformedToolContractError("Unable to find subkey '{x}' within key '{i}'".format(x=x_, i=Constants.TOOL))
         return d[Constants.TOOL][x_]
 
     def _get_or(x_, default):
@@ -228,7 +239,9 @@ def __core_tool_contract_task_from(d):
 
     input_types = [_to_in_ft(x) for x in _get("input_types")]
     output_types = [_to_out_ft(x) for x in _get("output_types")]
-    tool_options = _get("schema_options")
+
+    tool_options = [pacbio_option_from_dict(opt_d) for opt_d in _get("schema_options")]
+
     nproc = _get("nproc")
     resource_types = _get("resource_types")
     return task_id, display_name, description, version, is_distributed, input_types, output_types, tool_options, nproc, resource_types
@@ -237,7 +250,8 @@ def __core_tool_contract_task_from(d):
 def __to_tc_from_d(d):
     def _wrapper(task):
         driver = __driver_from_d(d)
-        tc = ToolContract(task, driver)
+        schema_version = d.get("schema_version", "UNKNOWN")
+        tc = ToolContract(task, driver, schema_version)
         return tc
     return _wrapper
 
@@ -283,6 +297,12 @@ def _gather_tool_contract_from(path_or_d):
 def tool_contract_from_d(d):
     """Load tool contract from dict"""
 
+    if Constants.TOOL not in d:
+        raise KeyError("Tool Contract must have {k}".format(k=Constants.TOOL))
+
+    if Constants.TOOL_ID not in d[Constants.TOOL]:
+        raise KeyError("Tool Contract must have {k}.{v}".format(k=Constants.TOOL, v=Constants.TOOL_ID))
+
     task_type = d[Constants.TOOL][Constants.TOOL_TYPE]
 
     dispatch_funcs = {TaskTypes.SCATTERED: _scattered_tool_contract_from,
@@ -302,9 +322,24 @@ def load_tool_contract_from(path_or_d):
     return tool_contract_from_d(path_or_d)
 
 
+# XXX this could probably be more robust
+@_json_path_or_d
def load_pipeline_presets_from(d):
    """Load pipeline presets from dict"""
    # Schema-validate the raw dict before constructing the model
    validate_presets(d)
    return PipelinePreset(options=d['options'],
                          task_options=d['taskOptions'],
                          pipeline_id=d['pipelineId'],
                          preset_id=d['presetId'],
                          name=d['name'],
                          description=d['description'])
+
+
 def _write_json(s, output_file):
     with open(output_file, 'w') as f:
-        f.write(json.dumps(s, indent=4, sort_keys=True))
+        f.write(json.dumps(s, indent=4, sort_keys=True, separators=(',', ': ')))
     return s
 
 
diff --git a/pbcommand/resolver.py b/pbcommand/resolver.py
index 3cb899b..f817bdf 100644
--- a/pbcommand/resolver.py
+++ b/pbcommand/resolver.py
@@ -40,28 +40,33 @@ def _resolve_max_nchunks(nchunks_or_symbol, max_nchunks):
 
 
 def _resolve_options(tool_contract, tool_options):
-    resolved_options = {}
+    """ Resolve Task Options from
 
-    # These probably exist somewhere else, feel free to replace:
-    type_map = {'integer': int,
-                'object': object,
-                'boolean': bool,
-                'number': (int, float),
-                'string': basestring}
+    :type tool_contract: ToolContract
+    :type tool_options: dict
+
+    :rtype: dict
+    """
+    resolved_options = {}
 
     # Get and Validate resolved value.
     # TODO. None support should be removed.
     for option in tool_contract.task.options:
-        for optid in option['required']:
-            exp_type = option['properties'][optid]['type']
-            value = tool_options.get(optid, option['properties'][optid]['default'])
-
-            if not isinstance(value, type_map[exp_type]):
-                raise ToolContractError("Incompatible option types. Supplied "
-                                        "{i}. Expected {t}".format(
-                                            i=type(value),
-                                            t=exp_type))
-            resolved_options[optid] = value
+        # This hides whatever underlying JSON grossness remains
+        value = tool_options.get(option.option_id, option.default)
+
+        # Wrap in a try to communicate error messages with reasonable context
+        try:
+            # This expects the PacBioOption subclass to implement the
+            # necessary validating function
+            validated_option = option.validate_option(value)
+            resolved_options[option.option_id] = validated_option
+        except (KeyError, ValueError, IndexError, TypeError) as e:
+            raise ToolContractError("Incompatible option types for {o}. "
+                                    "Supplied {i}. Expected pacbio opt type '{t}' {e}".format(
+                                        o=option.option_id,
+                                        i=type(value),
+                                        t=option.OPTION_TYPE_ID, e=str(e)))
 
     return resolved_options
 
@@ -130,6 +135,7 @@ def _resolve_output_files(output_file_types, root_output_dir):
 
 
 def _resolve_core(tool_contract, input_files, root_output_dir, max_nproc, tool_options, tmp_dir=None):
+    """ tool_options are dict{id:value} of values to override defaults """
 
     if len(input_files) != len(tool_contract.task.input_file_types):
         _d = dict(i=input_files, t=tool_contract.task.input_file_types)
diff --git a/pbcommand/schemas/__init__.py b/pbcommand/schemas/__init__.py
index 46f9046..a3241aa 100644
--- a/pbcommand/schemas/__init__.py
+++ b/pbcommand/schemas/__init__.py
@@ -10,6 +10,7 @@ SCHEMA_REGISTRY = {}
 __all__ = ['validate_pbreport',
            'validate_tc',
            'validate_rtc',
+           'validate_datastore_view_rules',
            'SCHEMA_REGISTRY']
 
 
@@ -25,12 +26,34 @@ def _load_schema(idx, name):
 RTC_SCHEMA = _load_schema("resolved_tool_contract", "resolved_tool_contract.avsc")
 PBREPORT_SCHEMA = _load_schema("pbreport", "pbreport.avsc")
 TC_SCHEMA = _load_schema("tool_contract", "tool_contract.avsc")
+PRESET_SCHEMA = _load_schema("pipeline_presets", "pipeline_presets.avsc")
+DS_VIEW_SCHEMA = _load_schema("datastore_view_rules", "datastore_view_rules.avsc")
+REPORT_SPEC_SCHEMA = _load_schema("report_spec", "report_spec.avsc")
 
 
-def _validate(schema, d):
def _validate(schema, msg, d):
    """Validate a python dict against an Avro schema.

    :param schema: parsed Avro schema to check against
    :param msg: human-readable model name used in the error message
    :param d: dict to validate
    :raises IOError: when *d* does not conform to *schema*
    """
    # FIXME(mkocher)(2016-7-16) Add a better error message than "Invalid"
    if not validate(schema, d):
        raise IOError("Invalid {m} ".format(m=msg))
    return True
+
+
def _is_valid(schema, d):
    """Return True/False for whether dict *d* conforms to the Avro *schema*."""
    return validate(schema, d)
 
-validate_rtc = functools.partial(_validate, RTC_SCHEMA)
-validate_pbreport = functools.partial(_validate, PBREPORT_SCHEMA)
-validate_tc = functools.partial(_validate, TC_SCHEMA)
+
+validate_rtc = functools.partial(_validate, RTC_SCHEMA, "Resolved Tool Contract Model")
+validate_pbreport = functools.partial(_validate, PBREPORT_SCHEMA, "Report Model")
+validate_report = validate_pbreport
+validate_tc = functools.partial(_validate, TC_SCHEMA, "Tool Contract Model")
+validate_presets = functools.partial(_validate, PRESET_SCHEMA, "Pipeline Presets Model")
+validate_datastore_view_rules = functools.partial(_validate, DS_VIEW_SCHEMA, "Pipeline DataStore View Rules")
+validate_report_spec = functools.partial(_validate, REPORT_SPEC_SCHEMA, "Report Specification Model")
+
+is_valid_rtc = functools.partial(_is_valid, RTC_SCHEMA)
+is_valid_report = functools.partial(_is_valid, PBREPORT_SCHEMA)
+is_valid_tc = functools.partial(_is_valid, TC_SCHEMA)
+is_valid_presets = functools.partial(_is_valid, PRESET_SCHEMA)
+is_valid_datastore_view_rules = functools.partial(_is_valid, DS_VIEW_SCHEMA)
+is_valid_report_spec = functools.partial(_is_valid, REPORT_SPEC_SCHEMA)
diff --git a/pbcommand/schemas/datastore.avsc b/pbcommand/schemas/datastore.avsc
new file mode 100644
index 0000000..161c986
--- /dev/null
+++ b/pbcommand/schemas/datastore.avsc
@@ -0,0 +1,89 @@
+{
+  "namespace": "com.pacbio.common.models",
+  "type": "record",
+  "name": "PacBioDataStore",
+  "doc": "Container for datastore files emitted from a 'Job' type (e.g., analysis, import-dataset, merge-dataset)",
+  "fields": [
+    {
+      "name": "createdAt",
+      "type": "string",
+      "doc": "ISO8601 Datetime to specify when the datastore file was created. Example 2016-08-18T07:40:43"
+    },
+    {
+      "name": "updatedAt",
+      "type": "string",
+      "doc": "ISO8601 Datetime to specify when the datastore file was last updated. Example 2016-08-18T07:40:43"
+    },
+    {
+      "name": "version",
+      "type": "string",
+      "doc": "Datastore schema version"
+    },
+    {
+      "doc": "List of DataStore files in the datastore",
+      "name": "files",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "DataStoreFile",
+          "doc": "DataStore file that contains metadata of a single output file",
+          "fields": [
+            {
+              "name": "sourceId",
+              "type": "string",
+              "doc": "Source ID unique identifier, must have the form {task-id}-{in|out}-{positional-index} Example `pbsmrtpipe.tasks.dev_hello_worlder-out-0`"
+            },
+            {
+              "name": "fileTypeId",
+              "type": "string",
+              "doc": "File type identifier. Example `PacBio.FileTypes.JsonReport`"
+            },
+            {
+              "name": "createdAt",
+              "type": "string",
+              "doc": "ISO8601 Datetime to specify when the file was created. Example 2016-08-18T07:40:43"
+            },
+            {
+              "name": "modifiedAt",
+              "type": "string",
+              "doc": "ISO8601 Datetime to specify when the file was last modified. Example 2016-08-18T07:40:43"
+            },
+            {
+              "name": "path",
+              "type": "string",
+              "doc": "Absolute path to the file. Example /path/to/my-file.gff"
+            },
+            {
+              "name": "fileSize",
+              "type": "string",
+              "doc": "File size in kB"
+            },
+            {
+              "default": false,
+              "name": "isChunked",
+              "type": "boolean",
+              "doc": "Is the file an intermediate file used in a chunked pipeline"
+            },
+            {
+              "name": "uniqueId",
+              "type": "string",
+              "doc": "Globally unique UUID of the datastore file. Example feddd711-9b37-4cc4-ac5a-4dd4134ad0ca"
+            },
+            {
+              "name": "name",
+              "type": "string",
+              "doc": "The default name of the datastore file will be used in the UI"
+            },
+            {
+              "name": "description",
+              "type": "string",
+              "doc": "the default description of the datastore file will be used in the UI"
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
+
diff --git a/pbcommand/schemas/datastore_view_rules.avsc b/pbcommand/schemas/datastore_view_rules.avsc
new file mode 100644
index 0000000..32fa84a
--- /dev/null
+++ b/pbcommand/schemas/datastore_view_rules.avsc
@@ -0,0 +1,56 @@
+{
+  "namespace": "com.pacbio.common.models.datastore",
+  "type": "record",
+  "name": "PipelineDataStoreViewRules",
+  "doc": "Custom view of the DataStoreFile(s) emitted from a specific pipeline (by id)",
+  "fields": [
+    {
+      "name": "pipelineId",
+      "type": "string",
+      "doc": "Fully qualified pipeline id to apply rules to. e.g., pbsmrtpipe.pipelines.dev_01"
+    },
+    {
+      "name": "smrtlinkVersion",
+      "type": "string",
+      "doc": "Version of SMRTLink to which these rules apply. e.g., '3.2'"
+    },
+    {
+      "name": "rules",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "DataStoreViewRule",
+          "doc": "Custom View of specific DataStoreFile by source id in the datastore.json",
+          "fields": [
+            {
+              "name": "sourceId",
+              "type": "string",
+              "doc": "Source ID as it appears in the pbsmrtpipe datastore, Should have the form {task-id}-{in|out}-{positional-index}"
+            },
+            {
+              "name": "fileTypeId",
+              "type": "string",
+              "doc": "File type identifier, e.g. PacBio.FileTypes.JsonReport"
+            },
+            {
+              "name": "isHidden",
+              "type": "boolean",
+              "doc": "Specifies that a file should not appear in the UI"
+            },
+            {
+              "name": "name",
+              "type": ["string", "null"],
+              "doc": "Override the display name (optional). If this is null, the default name of the datastore file will be used in UI"
+            },
+            {
+              "name": "description",
+              "type": ["string", "null"],
+              "doc": "Override the display description (optional). If this is null, the default description of the datastore file will be used in UI"
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
diff --git a/pbcommand/schemas/pbreport.avsc b/pbcommand/schemas/pbreport.avsc
index 3fb37e1..610c9b8 100644
--- a/pbcommand/schemas/pbreport.avsc
+++ b/pbcommand/schemas/pbreport.avsc
@@ -2,11 +2,28 @@
   "namespace": "com.pacbio.common.models.reports",
   "type": "record",
   "name": "Report",
+  "doc": "PacBio Report data model. Except where specified all `id` values must be of the form [A-z][0-9]_ (e.g., `mapping_stats`)",
   "fields": [
     {
       "name": "id",
       "type": "string",
-      "desc": "Pbreports style id, must only have [A-z][0-9]_"
+      "doc": "Pbreports style id, must only have [A-z][0-9]_"
+    },
+     {
+      "name": "version",
+      "type": "string",
+      "doc": "Version of the Report Schema Spec"
+    },
+     {
+      "name": "uuid",
+      "type": "string",
+      "doc": "Report UUID"
+    },
+     {
+      "name": "title",
+      "type": ["string", "null"],
+      "default": null,
+      "doc": "Report Display name"
     },
     {
       "name": "attributes",
@@ -17,14 +34,17 @@
           "name": "ReportAttribute",
           "fields": [
             {
+              "doc": "Report Attribute id",
               "name": "id",
               "type": "string"
             },
             {
+              "doc": "Report Attribute display name",
               "name": "name",
               "type": "string"
             },
             {
+              "doc": "Report Attribute value",
               "name": "value",
               "type": [
                 "string",
@@ -45,10 +65,12 @@
           "name": "PlotGroup",
           "fields": [
             {
+              "doc": "Plot group Id",
               "name": "id",
               "type": "string"
             },
             {
+              "doc": "The display name of plot group",
               "name": "title",
               "type": "string"
             },
@@ -58,9 +80,10 @@
                 "string",
                 "null"
               ],
-              "desc": "Not clear what the usecase is of this"
+              "doc": "Not clear what the usecase is of this"
             },
             {
+              "doc": "Thumbnail image path for the entire PlotGroup",
               "name": "thumbnail",
               "type": [
                 "string",
@@ -68,26 +91,33 @@
               ]
             },
             {
+              "doc": "List of Plots",
               "name": "plots",
               "type": {
                 "type": "array",
                 "items": {
+                  "doc": "PacBio Report Plot",
                   "type": "record",
                   "name": "ReportPlot",
                   "fields": [
                     {
                       "name": "id",
                       "type": "string",
-                      "desc": "Plot Id"
+                      "doc": "Plot Id"
                     },
                     {
                       "name": "image",
                       "type": "string",
-                      "desc": "Relative Path to Image"
+                      "doc": "Png Path to Image (must be relative to the path of report.json file)"
+                    },
+                     {
+                      "name": "title",
+                      "type": ["string", "null"],
+                      "doc": "Display Name of Plot"
                     },
                     {
                       "name": "caption",
-                      "desc": "Caption of the Plot",
+                      "doc": "Caption of the Plot",
                       "type": [
                         "string",
                         "null"
@@ -95,7 +125,7 @@
                     },
                     {
                       "name": "thumbnail",
-                      "desc": "Relative path to thumbnail of the Plot",
+                      "doc": "Relative path to thumbnail of the Plot (must be relative to the path of report.json file)",
                       "type": [
                         "string",
                         "null"
@@ -118,17 +148,18 @@
           "name": "ReportTable",
           "fields": [
             {
+              "doc": "Report Table Id",
               "name": "id",
               "type": "string"
             },
             {
               "name": "title",
               "type": "string",
-              "desc": "Title of the Table"
+              "doc": "Display name of the Table"
             },
             {
               "name": "columns",
-              "desc": "List of Columns",
+              "doc": "List of Columns",
               "type": {
                 "type": "array",
                 "items": {
@@ -136,21 +167,26 @@
                   "name": "ReportTableColumn",
                   "fields": [
                     {
+                      "doc": "Unique id of column (must be report id format style)",
                       "name": "id",
                       "type": "string"
                     },
                     {
+                      "doc": "Display name of Column",
                       "name": "header",
                       "type": "string"
                     },
                     {
                       "name": "value",
-                      "desc": "Column values. Attention to mixed-types attempting to represent 'NA'",
+                      "_comment": "This is a quite unclear interface",
+                      "doc": "Column values. Attention to mixed-types attempting to represent 'NA'",
                       "type": {
                         "type": "array",
                         "items": [
                           "int",
-                          "float"
+                          "float",
+                          "string",
+                          "null"
                         ]
                       }
                     }
diff --git a/pbcommand/schemas/pipeline_presets.avsc b/pbcommand/schemas/pipeline_presets.avsc
new file mode 100644
index 0000000..fd0f0e1
--- /dev/null
+++ b/pbcommand/schemas/pipeline_presets.avsc
@@ -0,0 +1,44 @@
+{
+  "namespace": "com.pacbio.common.models.pipeline_presets",
+  "type": "record",
+  "name": "PipelinePreset",
+  "doc": "Pipeline Preset with custom task options",
+  "fields": [
+    {
+      "name": "pipelineId",
+      "type": "string",
+      "doc": "Fully qualified pipeline ID, must only have [A-Z][0-9]_."
+    },
+    {
+      "name": "presetId",
+      "type": "string",
+      "doc": "Fully qualified ID of the pipeline preset, must only have [A-Z][0-9]_."
+    },
+    {
+      "name": "name",
+      "type": "string",
+      "doc": "Plain-English name of the task option as it will appear in UI"
+    },
+    {
+      "name": "description",
+      "type": "string",
+      "doc": "More detailed description of the task option as it will appear in UI"
+    },
+    {
+      "doc": "Workflow level options. See the pbsmrtpipe docs for details",
+      "name": "options",
+      "type": {
+        "type": "map",
+        "values": ["long", "boolean", "string", "int", "double"]
+      }
+    },
+    {
+      "doc": "Task level options. Please see the pipeline of interest to get a list of available task options using `pbsmrtpipe show-template-details <my-pipeline-id>`",
+      "name": "taskOptions",
+      "type": {
+        "type": "map",
+        "values": ["long", "boolean", "string", "int", "double"]
+      }
+    }
+  ]
+}
diff --git a/pbcommand/schemas/pipeline_template_view_rules.avsc b/pbcommand/schemas/pipeline_template_view_rules.avsc
new file mode 100644
index 0000000..7dc9a5a
--- /dev/null
+++ b/pbcommand/schemas/pipeline_template_view_rules.avsc
@@ -0,0 +1,50 @@
+{
+  "namespace": "com.pacbio.common.models.pipeline",
+  "type": "record",
+  "name": "PipelineTemplateView",
+  "doc": "Custom views of a Resolved Pipeline Template and task options",
+  "fields": [
+    {
+      "name": "id",
+      "type": "string",
+      "doc": "Fully qualified pipeline ID, must only have [A-Z][0-9]_."
+    },
+    {
+      "name": "name",
+      "type": "string",
+      "doc": "Plain-English name of the pipeline as it will appear in UI"
+    },
+    {
+      "name": "description",
+      "type": "string",
+      "doc": "More detailed description of the pipeline as it will appear in UI"
+    },
+    {
+      "name": "taskOptions",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "TaskOptionViewRule",
+          "fields": [
+            {
+              "name": "id",
+              "type": "string",
+              "doc": "Source ID as it appears in the pbsmrtpipe datastore, Should have the form {task-id}-{in|out}-{positional-index}"
+            },
+            {
+              "name": "hidden",
+              "type": "boolean",
+              "doc": "Specifies that a field should not appear in the UI"
+            },
+            {
+              "name": "advanced",
+              "type": "boolean",
+              "doc": "Specifies that a field should only appear in the advanced settings window"
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
diff --git a/pbcommand/schemas/report_spec.avsc b/pbcommand/schemas/report_spec.avsc
new file mode 100644
index 0000000..38b4b4d
--- /dev/null
+++ b/pbcommand/schemas/report_spec.avsc
@@ -0,0 +1,248 @@
+{
+  "namespace": "com.pacbio.common.models.reports",
+  "type": "record",
+  "name": "ReportSpec",
+  "doc": "Specification and view rules for a single PacBio report.",
+  "fields": [
+    {
+      "name": "id",
+      "type": "string",
+      "doc": "Pbreports style id, must only have [A-z][0-9]_"
+    },
+    {
+      "name": "version",
+      "type": "string",
+      "doc": "Version of the report corresponding to this spec"
+    },
+    {
+      "name": "title",
+      "type": "string",
+      "doc": "Report display name"
+    },
+    {
+      "name": "description",
+      "type": ["string", "null"],
+      "default": null,
+      "doc": "Plain-English description of this report, suitable for documentation"
+    },
+    {
+      "name": "isHidden",
+      "type": ["boolean", "null"],
+      "default": null,
+      "doc": "Flag to hide the entire report"
+    },
+    {
+      "name": "attributes",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "AttributeSpec",
+          "fields": [
+            {
+              "name": "id",
+              "doc": "Report attribute ID",
+              "type": "string"
+            },
+            {
+              "name": "name",
+              "type": "string",
+              "doc": "Report attribute display name"
+            },
+            {
+              "name": "description",
+              "type": ["string", "null"],
+              "default": null,
+              "doc": "Plain-English description of the attribute's meaning"
+            },
+            {
+              "name": "type",
+              "type": "string",
+              "doc": "Expected type of the attribute value"
+            },
+            {
+              "name": "format",
+              "type": ["string", "null"],
+              "default": null,
+              "doc": "Format string to apply to the value in UI"
+            },
+            {
+              "name": "isHidden",
+              "type": ["boolean", "null"],
+              "default": null,
+              "doc": "Flag to hide this attribute"
+            }
+          ]
+        }
+      }
+    },
+    {
+      "name": "tables",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "TableSpec",
+          "fields": [
+            {
+              "doc": "Report table Id",
+              "name": "id",
+              "type": "string"
+            },
+            {
+              "name": "title",
+              "type": "string",
+              "doc": "Display name of the Table"
+            },
+            {
+              "name": "description",
+              "type": ["string", "null"],
+              "default": null,
+              "doc": "Plain-English description of the table"
+            },
+            {
+              "name": "isHidden",
+              "type": ["boolean", "null"],
+              "default": null,
+              "doc": "Flag to hide this table"
+            },
+            {
+              "name": "columns",
+              "doc": "List of Columns",
+              "type": {
+                "type": "array",
+                "items": {
+                  "type": "record",
+                  "name": "TableColumnSpec",
+                  "fields": [
+                    {
+                      "doc": "Unique id of column (must be report id format style)",
+                      "name": "id",
+                      "type": "string"
+                    },
+                    {
+                      "name": "header",
+                      "type": "string",
+                      "doc": "Display name of Column"
+                    },
+                    {
+                      "name": "description",
+                      "type": ["string", "null"],
+                      "default": null,
+                      "doc": "Plain-English description of column"
+                    },
+                    {
+                      "name": "type",
+                      "type": "string",
+                      "doc": "Expected type of column values"
+                    },
+                    {
+                      "name": "format",
+                      "type": ["string", "null"],
+                      "default": null,
+                      "doc": "Format string to apply to values in the UI"
+                    },
+                    {
+                      "name": "isHidden",
+                      "type": ["boolean", "null"],
+                      "default": null,
+                      "doc": "Flag to hide this column"
+                    }
+                  ]
+                }
+              }
+            }
+          ]
+        }
+      }
+    },
+    {
+      "name": "plotGroups",
+      "type": {
+        "type": "array",
+        "items": {
+          "type": "record",
+          "name": "PlotGroupSpec",
+          "fields": [
+            {
+              "name": "id",
+              "type": "string",
+              "doc": "Plot group ID"
+            },
+            {
+              "name": "title",
+              "type": "string",
+              "doc": "Plot group title"
+            },
+            {
+              "name": "legend",
+              "type": ["string", "null"],
+              "doc": "Not clear what the use case of this is",
+              "default": null
+            },
+            {
+              "name": "description",
+              "type": ["string", "null"],
+              "default": null,
+              "doc": "Plain-English description"
+            },
+            {
+              "doc": "List of Plots",
+              "name": "plots",
+              "type": {
+                "type": "array",
+                "items": {
+                  "doc": "PacBio Report Plot",
+                  "type": "record",
+                  "name": "PlotSpec",
+                  "fields": [
+                    {
+                      "name": "id",
+                      "type": "string",
+                      "doc": "Plot Id"
+                    },
+                    {
+                      "name": "title",
+                      "type": ["string", "null"],
+                      "doc": "Display Name of Plot"
+                    },
+                    {
+                      "name": "caption",
+                      "doc": "Caption of the Plot",
+                      "type": ["string", "null"],
+                      "default": null
+                    },
+                    {
+                      "name": "description",
+                      "type": ["string", "null"],
+                      "doc": "Plain-English description",
+                      "default": null
+                    },
+                    {
+                      "name": "xlabel",
+                      "type": ["string", "null"],
+                      "default": null,
+                      "doc": "X-axis label (optional)"
+                    },
+                    {
+                      "name": "ylabel",
+                      "type": ["string", "null"],
+                      "default": null,
+                      "doc": "Y-axis label (optional)"
+                    },
+                    {
+                      "name": "isHidden",
+                      "type": ["boolean", "null"],
+                      "default": null,
+                      "doc": "Flag to hide this plot"
+                    }
+                  ]
+                }
+              }
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
diff --git a/pbcommand/schemas/resolved_tool_contract.avsc b/pbcommand/schemas/resolved_tool_contract.avsc
index 9d3c47f..66a45a5 100644
--- a/pbcommand/schemas/resolved_tool_contract.avsc
+++ b/pbcommand/schemas/resolved_tool_contract.avsc
@@ -2,14 +2,18 @@
   "namespace": "com.pacbio.common.models.contracts",
   "type": "record",
   "name": "ResolvedToolContract",
+  "doc": "Resolved `ToolContract` used to run tasks in pipelines",
   "fields": [
     {
       "name": "resolved_tool_contract",
+      "doc": "Container for Resolved Tool Contract metadata",
       "type": {
+        "doc": "Resolved Tool Contract Task metadata, such as nproc to use, resolved paths to input and output files and resolved Task Option values",
         "type": "record",
         "name": "ResolvedToolContractTask",
         "fields": [
           {
+            "doc": "Resolved paths to input files (Paths must be absolute)",
             "name": "input_files",
             "type": {
               "type": "array",
@@ -19,6 +23,7 @@
             }
           },
           {
+            "doc": "Resolved paths to output files (Paths must be absolute)",
             "name": "output_files",
             "type": {
               "type": "array",
@@ -32,26 +37,32 @@
             "name": "options",
             "type": {
               "type": "map",
-              "values": ["long", "boolean", "string", "int"]
+              "values": ["long", "boolean", "string", "int", "float"]
             }
           },
           {
+            "doc": "Number of Processors to use",
             "name": "nproc",
             "type": "int"
           },
           {
+            "doc": "Determine if the task should be submitted to the distributed computing env (e.g., SGE). Only applies if the system is configured to support distributed computing",
             "name": "is_distributed",
             "type": "boolean"
           },
           {
+            "doc": "Task type",
             "name": "task_type",
             "type": "string"
           },
           {
+            "doc": "Globally unique id to reference a Tool Contract. This is sometimes referred to as the `task` id (for historical purposes)",
             "name": "tool_contract_id",
             "type": "string"
           },
           {
+            "_comment": "FIXME(mkocher) This needs to be well defined",
+            "doc": "Log level to emit to. Supports the standard INFO, DEBUG, ERROR values",
             "name": "log_level",
             "type": "string"
           },
@@ -68,12 +79,14 @@
       }
     },
     {
+      "doc": "Driver executable to be called to execute the task",
       "name": "driver",
       "type": {
         "type": "record",
         "name": "Driver",
         "fields": [
           {
+            "doc": "Executable to be called. The first positional argument will be the path to the `ResolvedToolContract` Example `python -m mytool.module run-tool-contract ` or `my-exe `. The exe must be in $PATH before running the task.",
             "name": "exe",
             "type": "string"
           }
diff --git a/pbcommand/schemas/tool_contract.avsc b/pbcommand/schemas/tool_contract.avsc
index 9f4a7f2..69d212f 100644
--- a/pbcommand/schemas/tool_contract.avsc
+++ b/pbcommand/schemas/tool_contract.avsc
@@ -4,10 +4,27 @@
   "name": "ToolContract",
   "fields": [
     {
+      "doc": "Version of the ToolContract",
+      "name": "version",
+      "type": "string"
+    },
+     {
+      "doc": "Schema Version of the ToolContract",
+      "name": "schema_version",
+      "type": ["string", "null"]
+    },
+    {
+      "_comment": "(this is duplicated in the Task?)",
+      "doc": "Fully qualified id of the tool contract (in the legacy model this is also the task type id)",
+      "name": "tool_contract_id",
+      "type": "string"
+    },
+    {
       "name": "tool_contract",
       "type": {
         "type": "record",
         "name": "ToolContractTask",
+        "doc": "Task for defining metadata of the task interface such as Input and Output file types, task options and other metadata",
         "fields": [
           {
             "name": "input_types",
@@ -18,20 +35,22 @@
                 "name": "ToolInputFile",
                 "fields": [
                   {
+                    "doc": "Id of input file",
                     "name": "id",
                     "type": "string"
                   },
                   {
-                    "doc": "PacBio File Type identifier",
+                    "doc": "PacBio File Type identifier, PacBio.DataSet.SubreadSet",
                     "name": "file_type_id",
                     "type": "string"
                   },
                   {
-                    "doc": "Display Name",
+                    "doc": "Display Name of input file type",
                     "name": "title",
                     "type": "string"
                   },
                   {
+                    "doc": "Description of input file type",
                     "name": "description",
                     "type": "string"
                   }
@@ -41,32 +60,37 @@
           },
           {
             "name": "output_types",
+            "doc": "Output file types of Task",
             "type": {
               "type": "array",
               "items": {
                 "type": "record",
                 "name": "ToolOutputFile",
+                "doc": "",
                 "fields": [
                   {
+                    "_comment": "FIXME(mpkocher) This needs to be clearly defined",
+                    "doc": "Unique id for referencing the output file",
                     "name": "id",
                     "type": "string"
                   },
                   {
-                    "doc": "PacBio FileType identifier",
+                    "doc": "PacBio FileType identifier, e.g., PacBio.DataSets.SubreadSet",
                     "name": "file_type_id",
                     "type": "string"
                   },
                   {
-                    "doc": "Display Name",
+                    "doc": "Display Name of the output file name",
                     "name": "title",
                     "type": "string"
                   },
                   {
-                    "doc": "Default file name",
+                    "doc": "Default base name of the file name. This must be provided without the extension. The extension is determined by the `file_type_id`",
                     "name": "default_name",
                     "type": "string"
                   },
                   {
+                    "doc": "Description of Output file",
                     "name": "description",
                     "type": "string"
                   }
@@ -81,14 +105,9 @@
               "items": {
                 "type": "record",
                 "name": "PacBioOptions",
-                "fields": [
-                  {
-                    "name": "pb_option",
-                    "type": {
-                      "type": "record",
-                      "name": "pb_option",
-                      "fields": [
+               "fields": [
                         {
+                          "doc": "Default value for the task option. Every task *must* have default value",
                           "name": "default",
                           "type": [
                             "int",
@@ -98,26 +117,56 @@
                           ]
                         },
                         {
-                          "name": "option_id",
+                          "doc": "Globally unique id of the form {namespace}.task_options.{key}. Example (pbtacos.task_options.max_records)",
+                          "name": "id",
                           "type": "string"
                         },
                         {
+                          "doc": "Display name of task option",
                           "name": "name",
                           "type": "string"
                         },
                         {
+                          "doc": "Description of Task Option",
                           "name": "description",
                           "type": "string"
+                        },
+                        {
+                          "doc": "PacBio task option type",
+                          "name": "optionTypeId",
+                          "type": {
+                            "doc": "This needs to be considerably improved and clarified. The option type must be consistent with the value defined. The naming is using camelcase because the same data model is used in the pipeline template.",
+                            "type": "enum",
+                            "name": "PacBioOptionType",
+                            "aliases": ["com.pacbio.common.models.contracts.PacBioOptionType"],
+                            "symbols": [
+                              "integer",
+                              "boolean",
+                              "string",
+                              "float",
+                              "choice_float",
+                              "choice_string",
+                              "choice_integer"
+                            ]
+                          }
                         }
                       ]
-                    }
-                  }
-                ]
               }
             }
           },
           {
-            "doc": "Number of processors to use",
+            "doc": "Description of Tool/Task",
+            "name": "description",
+            "type": "string"
+          },
+          {
+            "doc": "Display Name of Tool/Task",
+            "name": "name",
+            "type": "string"
+          },
+          {
+            "_comment": "FIXME(mpkocher) This can be given as a Symbol `$max_nproc`",
+            "doc": "Number of processors to use. This can be given as a Symbol `$max_nproc` See pbsmrtpipe docs for more details",
             "name": "nproc",
             "type": "int"
           },
@@ -129,7 +178,8 @@
           {
             "doc": "Task class type, Standard, Scatter, Gather",
             "name": "task_type",
-            "type": "string"
+            "type": "string",
+            "default": "pbsmrtpipe.task_types.standard"
           },
           {
             "doc": "Determine if the task will be submitted to the cluster resources",
@@ -137,6 +187,7 @@
             "type": "boolean"
           },
           {
+            "doc": "This needs to be converted to an ENUM. Allowed values $tmpfile $tmpdir",
             "name": "resource_types",
             "type": {
               "type": "array",
@@ -155,8 +206,14 @@
         "name": "ToolDriver",
         "fields": [
           {
+            "doc": "path to exe. The first arg will be the resolved tool contract JSON",
             "name": "exe",
             "type": "string"
+          },
+          {
+            "doc": "Serialization type. Either 'json' or 'avro' binary format",
+            "name": "serialization",
+            "type": "string"
           }
         ]
       }
diff --git a/pbcommand/services/cli.py b/pbcommand/services/cli.py
index e70aa31..0e84546 100644
--- a/pbcommand/services/cli.py
+++ b/pbcommand/services/cli.py
@@ -1,4 +1,5 @@
-"""CLI for interacting with the PacBio Services
+"""
+CLI (deprecated) for interacting with the PacBio Services
 
 0.1.0 Version, Import/Convert datasets
 
@@ -12,7 +13,7 @@ pbservice import-fasta /path/to/file.fasta --name my-name --organism my-org --pl
 pbservice run-analysis path/to/file.json
 pbservice run-merge-dataset path/to/file.json
 
-
+This program is largely replaced by the Scala version in 'smrtflow'.
 """
 import argparse
 import json
@@ -23,16 +24,19 @@ import sys
 import logging
 import functools
 import time
-import tempfile
 import traceback
 import uuid
+import warnings
+
 from requests import RequestException
+import iso8601
 
 from pbcommand.cli import get_default_argparser_with_base_opts
 from pbcommand.models import FileTypes
 from pbcommand.services import (ServiceAccessLayer,
                                 ServiceEntryPoint,
                                 JobExeError)
+from pbcommand.services.service_access_layer import (DATASET_METATYPES_TO_ENDPOINTS, )
 from pbcommand.validators import validate_file, validate_or
 from pbcommand.common_options import add_common_options
 from pbcommand.utils import (is_dataset,
@@ -41,7 +45,7 @@ from pbcommand.utils import (is_dataset,
 
 from .utils import to_ascii
 
-__version__ = "0.2.0"
+__version__ = "0.2.1"
 
 log = logging.getLogger(__name__)
 log.addHandler(logging.NullHandler())  # suppress warning message
@@ -50,7 +54,31 @@ log.addHandler(logging.NullHandler())  # suppress warning message
 _LOG_FORMAT = '[%(levelname)s] %(asctime)-15s %(message)s'
 
 
+def _list_dict_printer(list_d):
+    for i in list_d:
+        print i
+
+try:
+    # keep this fallback to stay backward compatible when tabulate is absent
+    from tabulate import tabulate
+
+    def printer(list_d):
+        print tabulate(list_d)
+    list_dict_printer = printer
+except ImportError:
+    list_dict_printer = _list_dict_printer
+
+
 class Constants(object):
+
+    # When running from the commandline, the host and port will default to these
+    # values if provided
+    ENV_PB_SERVICE_HOST = "PB_SERVICE_HOST"
+    ENV_PB_SERVICE_PORT = "PB_SERVICE_PORT"
+
+    DEFAULT_HOST = "http://localhost"
+    DEFAULT_PORT = 8070
+
     FASTA_TO_REFERENCE = "fasta-to-reference"
     RS_MOVIE_TO_DS = "movie-metadata-to-dataset"
 
@@ -63,6 +91,13 @@ def _is_xml(path):
     return path.endswith(".xml")
 
 
+def add_max_items_option(default, desc="Max items to return"):
+    def f(p):
+        p.add_argument('-m', '--max-items', type=int, default=default, help=desc)
+        return p
+    return f
+
+
 def validate_xml_file_or_dir(path):
     px = os.path.abspath(os.path.expanduser(path))
     if os.path.isdir(px):
@@ -73,6 +108,9 @@ def validate_xml_file_or_dir(path):
         raise argparse.ArgumentTypeError("Expected dir or file '{p}'".format(p=path))
 
 
+validate_int_or_uuid = validate_or(int, uuid.UUID, "Expected Int or UUID")
+
+
 def _get_size_mb(path):
     return os.stat(path).st_size / 1024.0 / 1024.0
 
@@ -98,9 +136,15 @@ def add_block_option(p):
 
 
 def add_sal_options(p):
+
+    default_port = os.environ.get(Constants.ENV_PB_SERVICE_PORT, Constants.DEFAULT_PORT)
+    default_host = os.environ.get(Constants.ENV_PB_SERVICE_HOST, Constants.DEFAULT_HOST)
+
     p.add_argument('--host', type=str,
-                   default="http://localhost", help="Server host")
-    p.add_argument('--port', type=int, default=8070, help="Server Port")
+                   default=default_host,
+                   help="Server host. Override the default with env {v}".format(v=Constants.ENV_PB_SERVICE_HOST))
+    p.add_argument('--port', type=int, default=default_port,
+                   help="Server Port. Override default with env {v}".format(v=Constants.ENV_PB_SERVICE_PORT))
     return p
 
 
@@ -165,16 +209,20 @@ def import_local_dataset(sal, path):
     else:
         ds = openDataSet(path, strict=True)
         if isinstance(ds, ReadSet) and not isinstance(ds, HdfSubreadSet):
-            log.info("checking BAM file integrity")
-            for rr in ds.resourceReaders():
-                try:
-                    last_record = rr[-1]
-                except Exception as e:
-                    log.exception("Import failed because the underlying "+
-                                  "data appear to be corrupted.  Run "+
-                                  "'pbvalidate' on the dataset for more "+
-                                  "thorough checking.")
-                    return 1
+            if len(ds) > 0:
+                log.info("checking BAM file integrity")
+                for rr in ds.resourceReaders():
+                    try:
+                        _ = rr[-1]
+                    except Exception as e:
+                        log.exception("Import failed because the underlying "+
+                                      "data appear to be corrupted.  Run "+
+                                      "'pbvalidate' on the dataset for more "+
+                                      "thorough checking.")
+                        return 1
+            else:
+                log.warn("Empty dataset - will import anyway")
+
     # this will raise if the import wasn't successful
     _ = sal.run_import_local_dataset(path)
     log.info("Successfully import dataset from {f}".format(f=path))
@@ -274,7 +322,7 @@ def run_analysis_job(sal, job_name, pipeline_id, service_entry_points, block=Fal
     if time_out is None:
         time_out = sal.JOB_DEFAULT_TIMEOUT
     status = sal.get_status()
-    log.info("Status {x}".format(x=status['message']))
+    log.info("System:{i} v:{v} Status:{x}".format(x=status['message'], i=status['id'], v=status['version']))
 
     resolved_service_entry_points = []
     for service_entry_point in service_entry_points:
@@ -327,7 +375,7 @@ def args_emit_analysis_template(args):
              taskOptions=[],
              workflowOptions=[])
 
-    sx = json.dumps(d, sort_keys=True, indent=4)
+    sx = json.dumps(d, sort_keys=True, indent=4, separators=(',', ': '))
     print sx
 
     return 0
@@ -347,26 +395,55 @@ def args_get_sal_summary(args):
 
 def add_get_job_options(p):
     add_base_and_sal_options(p)
-    p.add_argument("job_id", type=int, help="Job id")
+    p.add_argument("job_id", type=validate_int_or_uuid, help="Job id or UUID")
     return p
 
 
 def run_get_job_summary(host, port, job_id):
     sal = get_sal_and_status(host, port)
     job = sal.get_job_by_id(job_id)
+    epoints = sal.get_analysis_job_entry_points(job_id)
 
     if job is None:
         log.error("Unable to find job {i} from {u}".format(i=job_id, u=sal.uri))
     else:
-        print job
+        # this is not awesome, but the scala code should be the fundamental
+        # tool
+        print "Job {}".format(job_id)
+        # The settings will often make this unreadable
+        print job._replace(settings={})
+        print " Entry Points {}".format(len(epoints))
+        for epoint in epoints:
+            print "  {}".format(epoint)
 
     return 0
 
 
+def add_get_job_list_options(p):
+    fs = [add_base_and_sal_options,
+          add_max_items_option(25, "Max Number of jobs")]
+    f = compose(*fs)
+    return f(p)
+
+
 def args_get_job_summary(args):
     return run_get_job_summary(args.host, args.port, args.job_id)
 
-validate_int_or_uuid = validate_or(int, uuid.UUID, "Expected Int or UUID")
+
+def run_job_list_summary(host, port, max_items, sort_by=None):
+    sal = get_sal_and_status(host, port)
+
+    jobs = sal.get_analysis_jobs()
+
+    jobs_list = jobs if sort_by is None else sorted(jobs, cmp=sort_by)
+
+    printer(jobs_list[:max_items])
+
+    return 0
+
+
+def args_get_job_list_summary(args):
+    return run_job_list_summary(args.host, args.port, args.max_items, sort_by=_cmp_sort_by_id_desc)
 
 
 def add_get_dataset_options(p):
@@ -375,16 +452,74 @@ def add_get_dataset_options(p):
     return p
 
 
+def add_get_dataset_list_options(p):
+    add_base_and_sal_options(p)
+    fx = add_max_items_option(25, "Max number of Datasets to show")
+    fx(p)
+    default_dataset_type = DATASET_METATYPES_TO_ENDPOINTS[FileTypes.DS_SUBREADS]
+    # this should be choice
+    p.add_argument('-t', '--dataset-type', type=str, default=default_dataset_type, help="DataSet Meta type")
+    return p
+
+
 def run_get_dataset_summary(host, port, dataset_id_or_uuid):
 
     sal = get_sal_and_status(host, port)
 
+    log.debug("Getting dataset {d}".format(d=dataset_id_or_uuid))
     ds = sal.get_dataset_by_uuid(dataset_id_or_uuid)
 
     if ds is None:
-        log.info("Unable to find DataSet '{i}' on {u}".format(i=dataset_id_or_uuid, u=sal.uri))
+        log.error("Unable to find DataSet '{i}' on {u}".format(i=dataset_id_or_uuid, u=sal.uri))
     else:
-        print ds
+        print pprint.pformat(ds, indent=2)
+
+    return 0
+
+
+def _cmp_sort_by_id_key_desc(a, b):
+    return b['id'] - a['id']
+
+
+def _cmp_sort_by_id_desc(a, b):
+    return b.id - a.id
+
+
+def run_get_dataset_list_summary(host, port, dataset_type, max_items, sort_by=None):
+    """
+
+    Display a list of Dataset summaries
+
+    :param host:
+    :param port:
+    :param dataset_type:
+    :param max_items:
+    :param sort_by: cmp-style func (a, b) -> int used to sort resources, e.g. lambda a, b: b.id - a.id
+    :return:
+    """
+    sal = get_sal_and_status(host, port)
+
+    def to_ep(file_type):
+        return DATASET_METATYPES_TO_ENDPOINTS[file_type]
+
+    # FIXME(mkocher)(2016-3-26) need to centralize this on the dataset "shortname"?
+    fs = {to_ep(FileTypes.DS_SUBREADS): sal.get_subreadsets,
+          to_ep(FileTypes.DS_REF): sal.get_referencesets,
+          to_ep(FileTypes.DS_ALIGN): sal.get_alignmentsets,
+          to_ep(FileTypes.DS_BARCODE): sal.get_barcodesets
+          }
+
+    f = fs.get(dataset_type)
+
+    if f is None:
+        raise KeyError("Unsupported dataset type {t} Supported types {s}".format(t=dataset_type, s=fs.keys()))
+    else:
+        datasets = f()
+        # this needs to be improved
+        sorted_datasets = datasets if sort_by is None else sorted(datasets, cmp=sort_by)
+
+        print "Number of {t} Datasets {n}".format(t=dataset_type, n=len(datasets))
+        list_dict_printer(sorted_datasets[:max_items])
 
     return 0
 
@@ -393,6 +528,14 @@ def args_run_dataset_summary(args):
     return run_get_dataset_summary(args.host, args.port, args.id_or_uuid)
 
 
+def args_run_dataset_list_summary(args):
+    return run_get_dataset_list_summary(args.host,
+                                        args.port,
+                                        args.dataset_type,
+                                        args.max_items,
+                                        sort_by=_cmp_sort_by_id_key_desc)
+
+
 def subparser_builder(subparser, subparser_id, description, options_func, exe_func):
     """
     Util to add subparser options
@@ -440,9 +583,15 @@ def get_parser():
     job_summary_desc = "Get Job Summary by Job Id"
     builder('get-job', job_summary_desc, add_get_job_options, args_get_job_summary)
 
+    job_list_summary_desc = "Get Job Summary by Job Id"
+    builder('get-jobs', job_list_summary_desc, add_get_job_list_options, args_get_job_list_summary)
+
     ds_summary_desc = "Get DataSet Summary by DataSet Id or UUID"
     builder('get-dataset', ds_summary_desc, add_get_dataset_options, args_run_dataset_summary)
 
+    ds_list_summary_desc = "Get DataSet List Summary by DataSet Type"
+    builder('get-datasets', ds_list_summary_desc, add_get_dataset_list_options, args_run_dataset_list_summary)
+
     return p
 
 
@@ -479,13 +628,21 @@ def main_runner(argv, parser, exe_runner_func,
     """
     Fundamental interface to commandline applications
     """
+
+    dep_msg = "The `pbservice` commandline is deprecated and will be removed " \
+              "in a future version. Please using the scala implementation in smrtflow " \
+              "at https://github.com/PacificBiosciences/smrtflow"
+
     started_at = time.time()
     args = parser.parse_args(argv)
 
-    level = get_parsed_args_log_level(args, default_level=logging.DEBUG)
+    level = get_parsed_args_log_level(args, default_level=level)
     console_or_file = args.log_file
     setup_logger(console_or_file, level, formatter=str_formatter)
 
+    warnings.warn(dep_msg, DeprecationWarning)
+    log.warn(dep_msg)
+
     log.debug(args)
     log.info("Starting tool version {v}".format(v=parser.version))
 
diff --git a/pbcommand/services/models.py b/pbcommand/services/models.py
index 300e0ad..bedd5ad 100644
--- a/pbcommand/services/models.py
+++ b/pbcommand/services/models.py
@@ -1,5 +1,6 @@
 """Services Specific Data Models"""
 from collections import namedtuple
+import json
 import uuid
 
 import iso8601
@@ -38,7 +39,7 @@ PbsmrtpipeLogResource = LogResource(SERVICE_LOGGER_RESOURCE_ID, "Pbsmrtpipe",
                                     "Secondary Analysis Pbsmrtpipe Job logger")
 
 
-class ServiceJob(namedtuple("ServiceJob", 'id uuid name state path job_type created_at')):
+class ServiceJob(namedtuple("ServiceJob", 'id uuid name state path job_type created_at settings')):
 
     @staticmethod
     def from_d(d):
@@ -51,8 +52,12 @@ class ServiceJob(namedtuple("ServiceJob", 'id uuid name state path job_type crea
         def to_t(x):
             return iso8601.parse_date(se(x))
 
+        def to_d(x):
+            # the "jsonSettings" are a string for some stupid reason
+            return json.loads(sx(x))
+
         return ServiceJob(sx('id'), sx('uuid'), se('name'), se('state'),
-                          se('path'), se('jobTypeId'), to_t('createdAt'))
+                          se('path'), se('jobTypeId'), to_t('createdAt'), to_d('jsonSettings'))
 
     def was_successful(self):
         return self.state == JobStates.SUCCESSFUL
@@ -132,12 +137,13 @@ class JobEntryPoint(namedtuple("JobEntryPoint", "job_id dataset_uuid dataset_met
 
 
 class JobStates(object):
-    RUNNING = "RUNNING"
     CREATED = "CREATED"
+    SUBMITTED = "SUBMITTED"
+    RUNNING = "RUNNING"
     FAILED = "FAILED"
     SUCCESSFUL = "SUCCESSFUL"
 
-    ALL = (RUNNING, CREATED, FAILED)
+    ALL = (RUNNING, CREATED, FAILED, SUCCESSFUL, SUBMITTED)
 
     # End points
     ALL_COMPLETED = (FAILED, SUCCESSFUL)
diff --git a/pbcommand/services/service_access_layer.py b/pbcommand/services/service_access_layer.py
index e8e59fb..4fdb9d3 100644
--- a/pbcommand/services/service_access_layer.py
+++ b/pbcommand/services/service_access_layer.py
@@ -71,32 +71,32 @@ def _parse_base_service_error(response):
         return response
 
 
-def _process_rget(total_url):
+def _process_rget(total_url, ignore_errors=False):
     """Process get request and return JSON response. Raise if not successful"""
     r = rqget(total_url)
     _parse_base_service_error(r)
-    if not r.ok:
+    if not r.ok and not ignore_errors:
         log.error("Failed ({s}) GET to {x}".format(x=total_url, s=r.status_code))
     r.raise_for_status()
     j = r.json()
     return j
 
 
-def _process_rget_with_transform(func):
+def _process_rget_with_transform(func, ignore_errors=False):
     """Post process the JSON result (if successful) with F(json_d) -> T"""
     def wrapper(total_url):
-        j = _process_rget(total_url)
+        j = _process_rget(total_url, ignore_errors=ignore_errors)
         return func(j)
     return wrapper
 
 
-def _process_rget_with_jobs_transform(total_url):
+def _process_rget_with_jobs_transform(total_url, ignore_errors=False):
     # defining an internal method, because this used in several places
-    jobs_d = _process_rget(total_url)
+    jobs_d = _process_rget(total_url, ignore_errors=ignore_errors)
     return [ServiceJob.from_d(job_d) for job_d in jobs_d]
 
 
-def _process_rget_or_none(func):
+def _process_rget_or_none(func, ignore_errors=False):
     """
     apply the transform func to the output of GET request if it was successful, else returns None
 
@@ -105,7 +105,7 @@ def _process_rget_or_none(func):
     """
     def wrapper(total_url):
         try:
-            return _process_rget_with_transform(func)(total_url)
+            return _process_rget_with_transform(func, ignore_errors)(total_url)
         except (RequestException, SMRTServiceBaseError):
             # FIXME
             # this should be a tighter exception case
@@ -232,7 +232,8 @@ DATASET_METATYPES_TO_ENDPOINTS = {
     FileTypes.DS_BARCODE: "barcodes",
     FileTypes.DS_CCS: "ccsreads",
     FileTypes.DS_CONTIG: "contigs",
-    FileTypes.DS_ALIGN_CCS: "css-alignments"}
+    FileTypes.DS_ALIGN_CCS: "cssalignments",
+    FileTypes.DS_GMAP_REF: "gmapreferences"}
 
 
 def _get_endpoint_or_raise(ds_type):
@@ -273,10 +274,33 @@ def _to_datastore(dx):
     return DataStore(ds_files)
 
 
+def _to_job_report_files(dx):
+    return [{u"reportTypeId": d["reportTypeId"],
+             u"dataStoreFile": _to_ds_file(d["dataStoreFile"])} for d in dx]
+
+
 def _to_entry_points(d):
     return [JobEntryPoint.from_d(i) for i in d]
 
 
+def _get_all_report_attributes(sal_get_reports_func, sal_get_reports_details_func, job_id):
+    """Util func for getting report Attributes
+
+    Note, this assumes that only one report type has been created. This is
+    probably not a great idea. Should re-evaluate this.
+    """
+    report_datafiles = sal_get_reports_func(job_id)
+    report_uuids = [r.values()[0].uuid for r in report_datafiles]
+    reports = [sal_get_reports_details_func(job_id, r_uuid) for r_uuid in report_uuids]
+    all_report_attributes = {}
+
+    for r in reports:
+        for x in r['attributes']:
+            all_report_attributes[x['id']] = x['value']
+
+    return all_report_attributes
+
+
 class ServiceAccessLayer(object):
     """General Access Layer for interfacing with the job types on Secondary SMRT Server"""
 
@@ -351,19 +375,38 @@ class ServiceAccessLayer(object):
         """
         return self.get_job_by_type_and_id(JobTypes.PB_PIPE, job_id)
 
+    def get_import_job_by_id(self, job_id):
+        return self.get_job_by_type_and_id(JobTypes.IMPORT_DS, job_id)
+
     def get_analysis_job_datastore(self, job_id):
         """Get DataStore output from (pbsmrtpipe) analysis job"""
         # this doesn't work the list is sli
         return self._get_job_resource_type_with_transform(JobTypes.PB_PIPE, job_id, ServiceResourceTypes.DATASTORE, _to_datastore)
 
     def get_analysis_job_reports(self, job_id):
-        """Get Reports output from (pbsmrtpipe) analysis job"""
-        return self._get_job_resource_type_with_transform(JobTypes.PB_PIPE, job_id, ServiceResourceTypes.REPORTS, lambda x: x)
+        """Get list of DataStore ReportFile types output from (pbsmrtpipe) analysis job"""
+        return self._get_job_resource_type_with_transform(JobTypes.PB_PIPE, job_id, ServiceResourceTypes.REPORTS, _to_job_report_files)
 
     def get_analysis_job_report_details(self, job_id, report_uuid):
         _d = dict(t=JobTypes.PB_PIPE, i=job_id, r=ServiceResourceTypes.REPORTS, p=ServiceAccessLayer.ROOT_JOBS, u=report_uuid)
         return _process_rget_or_none(lambda x: x)(_to_url(self.uri, "{p}/{t}/{i}/{r}/{u}".format(**_d)))
 
+    def get_analysis_job_report_attrs(self, job_id):
+        """Return a dict of all the Report Attributes"""
+        return _get_all_report_attributes(self.get_analysis_job_reports, self.get_analysis_job_report_details, job_id)
+
+    def get_import_job_reports(self, job_id):
+        return self._get_job_resource_type_with_transform(JobTypes.IMPORT_DS, job_id, ServiceResourceTypes.REPORTS, _to_job_report_files)
+
+    def get_import_job_report_details(self, job_id, report_uuid):
+        # It would have been better to return a Report instance, not raw json
+        _d = dict(t=JobTypes.IMPORT_DS, i=job_id, r=ServiceResourceTypes.REPORTS, p=ServiceAccessLayer.ROOT_JOBS, u=report_uuid)
+        return _process_rget_or_none(lambda x: x)(_to_url(self.uri, "{p}/{t}/{i}/{r}/{u}".format(**_d)))
+
+    def get_import_job_report_attrs(self, job_id):
+        """Return a dict of all the Report Attributes"""
+        return _get_all_report_attributes(self.get_import_job_reports, self.get_import_job_report_details, job_id)
+
     def get_analysis_job_entry_points(self, job_id):
         return self._get_job_resource_type_with_transform(JobTypes.PB_PIPE, job_id, ServiceResourceTypes.ENTRY_POINTS, _to_entry_points)
 
@@ -411,6 +454,12 @@ class ServiceAccessLayer(object):
     def run_import_dataset_reference(self, path, time_out=10):
         return self._run_import_and_block(self.import_dataset_reference, path, time_out=time_out)
 
+    def import_dataset_barcode(self, path):
+        return self._import_dataset(FileTypes.DS_BARCODE, path)
+
+    def run_import_dataset_barcode(self, path, time_out=10):
+        return self._run_import_and_block(self.import_dataset_barcode, path, time_out=time_out)
+
     def run_import_local_dataset(self, path):
         """Import a file from FS that is local to where the services are running
 
@@ -419,17 +468,23 @@ class ServiceAccessLayer(object):
         :rtype: JobResult
         """
         dataset_meta_type = get_dataset_metadata(path)
+
         def _verify_dataset_in_list():
             file_type = FileTypes.ALL()[dataset_meta_type.metatype]
             ds_endpoint = _get_endpoint_or_raise(file_type)
+
+            # all datasets for a specific type
             datasets = self._get_datasets_by_type(ds_endpoint)
+
             uuids = {ds['uuid'] for ds in datasets}
-            if not dataset_meta_type.uuid in uuids:
-                 raise JobExeError(("Dataset {u} was imported but does not "+
+            if dataset_meta_type.uuid not in uuids:
+                raise JobExeError(("Dataset {u} was imported but does not "+
                                     "appear in the dataset list; this may "+
                                     "indicate XML schema errors.").format(
                                     u=dataset_meta_type.uuid))
-        result = self.get_dataset_by_uuid(dataset_meta_type.uuid)
+
+        result = self.get_dataset_by_uuid(dataset_meta_type.uuid,
+                                          ignore_errors=True)
         if result is None:
             log.info("Importing dataset {p}".format(p=path))
             job_result = self.run_import_dataset_by_type(dataset_meta_type.metatype, path)
@@ -450,12 +505,14 @@ class ServiceAccessLayer(object):
             # need to clean this up
             return JobResult(self.get_job_by_id(result['jobId']), 0, "")
 
-    def get_dataset_by_uuid(self, int_or_uuid):
+    def get_dataset_by_uuid(self, int_or_uuid, ignore_errors=False):
         """The recommend model is to look up DataSet type by explicit MetaType
 
         Returns None if the dataset was not found
         """
-        return _process_rget_or_none(_null_func)(_to_url(self.uri, "{p}/{i}".format(i=int_or_uuid, p=ServiceAccessLayer.ROOT_DS)))
+        return _process_rget_or_none(_null_func, ignore_errors=ignore_errors)(
+            _to_url(self.uri, "{p}/{i}".format(i=int_or_uuid,
+                                               p=ServiceAccessLayer.ROOT_DS)))
 
     def get_dataset_by_id(self, dataset_type, int_or_uuid):
         """Get a Dataset using the DataSetMetaType and (int|uuid) of the dataset"""
@@ -483,6 +540,12 @@ class ServiceAccessLayer(object):
     def get_referencesets(self):
         return self._get_datasets_by_type("references")
 
+    def get_barcodeset_by_id(self, int_or_uuid):
+        return self.get_dataset_by_id(FileTypes.DS_BARCODE, int_or_uuid)
+
+    def get_barcodesets(self):
+        return self._get_datasets_by_type("barcodes")
+
     def get_alignmentset_by_id(self, int_or_uuid):
         return self.get_dataset_by_id(FileTypes.DS_ALIGN, int_or_uuid)
 
@@ -538,7 +601,12 @@ class ServiceAccessLayer(object):
         # FIXME. Need to define this in the scenario IO layer.
         # workflow_options = [_to_o("woption_01", "value_01")]
         workflow_options = []
-        d = dict(name=name, pipelineId=pipeline_template_id, entryPoints=seps, taskOptions=task_options, workflowOptions=workflow_options)
+        d = dict(name=name,
+                 pipelineId=pipeline_template_id,
+                 entryPoints=seps,
+                 taskOptions=task_options,
+                 workflowOptions=workflow_options)
+
         raw_d = _process_rpost(_to_url(self.uri, "{r}/{p}".format(p=JobTypes.PB_PIPE, r=ServiceAccessLayer.ROOT_JOBS)), d)
         return ServiceJob.from_d(raw_d)
 
diff --git a/pbcommand/services/utils.py b/pbcommand/services/utils.py
index 33a8f11..472c614 100644
--- a/pbcommand/services/utils.py
+++ b/pbcommand/services/utils.py
@@ -102,10 +102,12 @@ def to_sal_summary(sal):
 
     x = outs.append
 
-    sep = "-" * 10
+    sep = "-" * 30
 
     x(repr(sal))
-    x("Status {s}".format(s=status['message']))
+    x("SystemId : {}".format(status['id']))
+    x("Version  : {}".format(status['version']))
+    x("Status   : {}".format(status['message']))
     x(sep)
     x(to_all_datasets_summary(sal, sep=sep))
     x(sep)
diff --git a/pbcommand/testkit/base_utils.py b/pbcommand/testkit/base_utils.py
old mode 100755
new mode 100644
diff --git a/pbcommand/testkit/core.py b/pbcommand/testkit/core.py
index c2ccc2b..8bff5b8 100644
--- a/pbcommand/testkit/core.py
+++ b/pbcommand/testkit/core.py
@@ -120,7 +120,7 @@ class PbTestApp(unittest.TestCase):
             self.assertTrue(opt in rtc.task.options, "Resolved option {x} not in RTC options.".format(x=opt))
             # this needs to support polymorphic equals (i.e., almostEquals
             if not isinstance(resolved_value, float):
-                emsg = "Resolved option {o} are not equal. Expected {a}, got {b}".format(o=opt, b=rtc.task.options[opt], a=resolved_value)
+                emsg = "Resolved option {o} are not equal. Expected '{a}', got '{b}'".format(o=opt, b=rtc.task.options[opt], a=resolved_value)
                 self.assertEquals(rtc.task.options[opt], resolved_value, emsg)
 
         # Resolved NPROC
diff --git a/pbcommand/utils.py b/pbcommand/utils.py
index e236f54..d9e4962 100644
--- a/pbcommand/utils.py
+++ b/pbcommand/utils.py
@@ -19,6 +19,7 @@ log.addHandler(logging.NullHandler())  # suppress the annoying no handlers msg
 
 
 class Constants(object):
+    """Log Level format strings"""
     LOG_FMT_ONLY_MSG = '%(message)s'
     LOG_FMT_ERR = '%(message)s'
     LOG_FMT_LVL = '[%(levelname)s] %(message)s'
@@ -142,7 +143,7 @@ def _get_console_and_file_logging_config_dict(console_level, console_formatter,
          'root': {'handlers': handlers.keys(), 'level': logging.DEBUG}
          }
 
-    #print pprint.pformat(d)
+    # print pprint.pformat(d)
     return d
 
 
@@ -175,13 +176,13 @@ def setup_log(alog,
               str_formatter=Constants.LOG_FMT_FULL):
     """Core Util to setup log handler
 
-    THIS NEEDS TO BE DEPRECATED
-
     :param alog: a log instance
     :param level: (int) Level of logging debug
     :param file_name: (str, None) if None, stdout is used, str write to file
     :param log_filter: (LogFilter, None)
     :param str_formatter: (str) log formatting str
+
+    .. warning:: THIS NEEDS TO BE DEPRECATED
     """
     setup_logger(file_name, level, formatter=str_formatter)
 
@@ -229,13 +230,15 @@ def log_traceback(alog, ex, ex_traceback):
 
     Example Usage (assuming you have a log instance in your scope)
 
-    try:
-        1 / 0
-    except Exception as e:
-        msg = "{i} failed validation. {e}".format(i=item, e=e)
-        log.error(msg)
-        _, _, ex_traceback = sys.exc_info()
-        log_traceback(log, e, ex_traceback)
+    :Example:
+
+    >>> try:
+    >>>    1 / 0
+    >>> except Exception as e:
+    >>>    msg = "{i} failed validation. {e}".format(i=item, e=e)
+    >>>    log.error(msg)
+    >>>    _, _, ex_traceback = sys.exc_info()
+    >>>    log_traceback(log, e, ex_traceback)
 
     """
 
@@ -244,11 +247,18 @@ def log_traceback(alog, ex, ex_traceback):
     alog.error(tb_text)
 
 
+def validate_type_or_raise(instance, type_or_types, error_prefix=None):
+    _d = dict(t=instance, x=type(instance), v=instance)
+    e = error_prefix if error_prefix is not None else ""
+    msg = e + "Expected type {t}. Got type {x} for {v}".format(**_d)
+    if not isinstance(instance, type_or_types):
+        raise TypeError(msg)
+    else:
+        return instance
+
+
 def _simple_validate_type(atype, instance):
-    if not isinstance(instance, atype):
-        _d = dict(t=atype, x=type(instance), v=instance)
-        raise TypeError("Expected type {t}. Got type {x} for {v}".format(**_d))
-    return instance
+    return validate_type_or_raise(instance, atype)
 
 _is_argparser_instance = functools.partial(_simple_validate_type, argparse.ArgumentParser)
 
@@ -267,11 +277,15 @@ def compose(*funcs):
 
     [f, g, h] will be f(g(h(x)))
 
-    fx = compose(f, g, h)
-
-    or
+    :Example:
 
-    fx = compose(*[f, g, h])
+    >>> f = lambda x: x * x
+    >>> g = lambda x: x + 1
+    >>> h = lambda x: x * 2
+    >>> funcs = [f, g, h]
+    >>> fgh = compose(*funcs)
+    >>> fgh(3) # 49
+    >>> compose(f, g, h)(3)
 
     """
     if not funcs:
@@ -292,9 +306,12 @@ def compose(*funcs):
 
 
 def which(exe_str):
-    """walk the exe_str in PATH to get current exe_str.
+    """walk the current PATH for exe_str to get the absolute path of the exe
 
-    If path is found, the full path is returned. Else it returns None.
+    :param exe_str: Executable name
+
+    :rtype: str | None
+    :returns: Absolute path to the executable or None if the exe is not found
     """
     paths = os.environ.get('PATH', None)
     resolved_exe = None
@@ -317,6 +334,7 @@ def which(exe_str):
 
 
 def which_or_raise(cmd):
+    """Find exe in path or raise ExternalCommandNotFoundError"""
     resolved_cmd = which(cmd)
     if resolved_cmd is None:
         raise ExternalCommandNotFoundError("Unable to find required cmd '{c}'".format(c=cmd))
@@ -425,14 +443,18 @@ def ignored(*exceptions):
 
 def get_dataset_metadata(path):
     """
-    Returns DataSetMeta data or raises ValueError, KeyError
+    Returns DataSetMetaData or raises ValueError if the dataset XML is missing
+    the required UniqueId and MetaType values.
 
-    :param path:
-    :return:
+    :param path: Path to DataSet XML
+    :raises: ValueError
+    :return: DataSetMetaData
     """
-    f = ET.parse(path).getroot().attrib
-    mt = f['MetaType']
-    uuid = f['UniqueId']
+    uuid = mt = None
+    for event, element in ET.iterparse(path, events=("start",)):
+        uuid = element.get("UniqueId")
+        mt = element.get("MetaType")
+        break
     if mt in FileTypes.ALL_DATASET_TYPES().keys():
         return DataSetMetaData(uuid, mt)
     else:
@@ -441,10 +463,11 @@ def get_dataset_metadata(path):
 
 def get_dataset_metadata_or_none(path):
     """
-    Returns DataSetMeta data, else None
+    Returns DataSetMeta data, else None if the file doesn't exist or
+    processing of the XML raises an exception.
 
-    :param path:
-    :return:
+    :param path: Path to DataSet XML
+    :return: DataSetMetaData or None
     """
     try:
         return get_dataset_metadata(path)
@@ -453,12 +476,19 @@ def get_dataset_metadata_or_none(path):
 
 
 def is_dataset(path):
-    """peek into the XML to get the MetaType"""
+    """peek into the XML to get the MetaType and verify that it's a valid dataset
+
+    :param path: Path to DataSet XML
+    """
     return get_dataset_metadata_or_none(path) is not None
 
 
 def walker(root_dir, file_filter_func):
-    """Filter files F(path) -> bool"""
+    """
+    Walk the file system and filter by the supplied filter function.
+
+    Filter function F(path) -> bool
+    """
     for root, dnames, fnames in os.walk(root_dir):
         for fname in fnames:
             path = os.path.join(root, fname)
diff --git a/pbcommand/validators.py b/pbcommand/validators.py
index 3d595bd..f7b177c 100644
--- a/pbcommand/validators.py
+++ b/pbcommand/validators.py
@@ -1,9 +1,12 @@
 import os
 import logging
 import functools
+
 from pbcommand.utils import nfs_exists_check
+from pbcommand.pb_io import load_report_from_json
 
 log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler()) # squash annoying no Handler msg
 
 
 def trigger_nfs_refresh(ff):
@@ -32,7 +35,6 @@ def validate_or(f1, f2, error_msg):
 
     :param error_msg: Default message to print
     """
-    @functools.wraps
     def wrapper(path):
         try:
             return f1(path)
@@ -92,3 +94,52 @@ def fofn_to_files(fofn):
         return list(bas_files)
     else:
         raise IOError("Unable to find FOFN {f}".format(f=fofn))
+
+
def validate_report(file_name):
    """Validate the contents of a Report JSON file.

    Collects all validation problems (missing titles/headers, inconsistent
    table column lengths, missing plot/thumbnail image files) and raises a
    single ValueError listing every issue found.

    :param file_name: Path to the Report JSON file
    :raises: ValueError if any validation errors are found
    :return: the loaded Report instance
    """
    errors = []
    # Image paths stored in the report are relative to the report's directory
    base_path = os.path.dirname(file_name)
    r = load_report_from_json(file_name)
    if r.title is None:
        errors.append("Report {i} is missing a title".format(i=r.id))
    for t in r.tables:
        if t.title is None:
            errors.append("Table {r}.{t} is missing a title".format(
                          r=r.id, t=t.id))
        for col in t.columns:
            if col.header is None:
                errors.append("Column {r}.{t}.{c} is missing a header".format(
                              r=r.id, t=t.id, c=col.id))
        # All columns in a table must have the same number of values.
        # More than one distinct length is an error; an empty table
        # (no columns) is trivially consistent.
        lengths = {len(col.values) for col in t.columns}
        if len(lengths) > 1:
            errors.append("Inconsistent column sizes in table {r}.{t}: {s}".format(
                          r=r.id, t=t.id,
                          s=",".join(str(x) for x in sorted(lengths))))
    for pg in r.plotGroups:
        if pg.title is None:
            errors.append("Plot group {r}.{g} is missing a title".format(
                          r=r.id, g=pg.id))
        for plot in pg.plots:
            if plot.image is None:
                errors.append("Plot {r}.{g}.{p} does not have an image".format(
                              r=r.id, g=pg.id, p=plot.id))
            else:
                img_path = os.path.join(base_path, plot.image)
                if not os.path.exists(img_path):
                    errors.append("The plot image {f} does not exist".format(
                                  f=img_path))
            # Plot thumbnails are optional; only validate when present
            if plot.thumbnail is not None:
                thumbnail = os.path.join(base_path, plot.thumbnail)
                if not os.path.exists(thumbnail):
                    errors.append("The thumbnail image {f} does not exist".format(
                                  f=thumbnail))
        if pg.thumbnail is not None:
            thumbnail = os.path.join(base_path, pg.thumbnail)
            if not os.path.exists(thumbnail):
                errors.append("The thumbnail image {f} does not exist".format(
                              f=thumbnail))
    if errors:
        raise ValueError("\n".join(errors))
    return r
diff --git a/setup.py b/setup.py
index 8a91df2..2f16d2c 100644
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,6 @@ setup(
     packages=find_packages(),
     package_data={"pbcommand": ["schemas/*.avsc"]},
     zip_safe=False,
-    entry_points={'console_scripts': ['pbservice = pbcommand.services.cli:main']},
     extras_require={"pbcore": ["pbcore", "ipython", "autopep8"],
                     "interactive": ['prompt_toolkit']},
     classifiers=['Development Status :: 4 - Beta',
diff --git a/tests/base_utils.py b/tests/base_utils.py
index e36b0c1..93a7372 100755
--- a/tests/base_utils.py
+++ b/tests/base_utils.py
@@ -5,6 +5,13 @@ from pbcommand.testkit.base_utils import *
 
 DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
 
+DATA_DIR_TC = os.path.join(DATA_DIR, 'tool-contracts')
+DATA_DIR_TC_V1 = os.path.join(DATA_DIR, 'tool-contracts-v1')
+DATA_DIR_RTC = os.path.join(DATA_DIR, 'resolved-tool-contracts')
+DATA_DIR_PRESETS = os.path.join(DATA_DIR, "pipeline-presets")
+DATA_DIR_DSVIEW = os.path.join(DATA_DIR, "pipeline-datastore-view-rules")
+DATA_DIR_REPORT_SPECS = os.path.join(DATA_DIR, "report-specs")
+
 
 def get_data_file(path):
     return os.path.join(DATA_DIR, path)
@@ -12,3 +19,15 @@ def get_data_file(path):
 
 def get_data_file_from_subdir(subdir, path):
     return os.path.join(DATA_DIR, subdir, path)
+
+
+def get_tool_contract(name):
+    return os.path.join(DATA_DIR_TC, name)
+
+
+def get_tool_contract_v1(name):
+    return os.path.join(DATA_DIR_TC_V1, name)
+
+
+def get_resolved_tool_contract(name):
+    return os.path.join(DATA_DIR_RTC, name)
diff --git a/tests/data/dev_gather_fasta_app_tool_contract.json b/tests/data/dev_gather_fasta_app_tool_contract.json
deleted file mode 100644
index e532bec..0000000
--- a/tests/data/dev_gather_fasta_app_tool_contract.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
-    "version": "0.1.0", 
-    "driver": {
-        "serialization": "json", 
-        "exe": "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract ", 
-        "env": {}
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_gather_fasta", 
-    "tool_contract": {
-        "task_type": "pbsmrtpipe.task_types.gathered", 
-        "resource_types": [], 
-        "description": "Gather a fasta resources in a Chunk.json file", 
-        "schema_options": [], 
-        "output_types": [
-            {
-                "title": "Chunk JSON", 
-                "description": "Output Fasta", 
-                "default_name": "gathered", 
-                "id": "output", 
-                "file_type_id": "PacBio.FileTypes.Fasta"
-            }
-        ], 
-        "_comment": "Created by v0.2.14", 
-        "name": "Fasta Chunk Gather", 
-        "input_types": [
-            {
-                "description": "Chunked Fasta JSON Out", 
-                "title": "Chunk JSON", 
-                "id": "chunk_json", 
-                "file_type_id": "PacBio.FileTypes.CHUNK"
-            }
-        ], 
-        "nproc": 1, 
-        "is_distributed": false, 
-        "tool_contract_id": "pbcommand.tasks.dev_gather_fasta"
-    }
-}
diff --git a/tests/data/example-conditions/reseq-conditions-01.json b/tests/data/example-conditions/reseq-conditions-01.json
new file mode 100644
index 0000000..5660ab0
--- /dev/null
+++ b/tests/data/example-conditions/reseq-conditions-01.json
@@ -0,0 +1,24 @@
+{
+  "_condition_doc": "Example of a 'Resequencing' Condition Type",
+  "conditions": [
+    {
+      "condId": "cond_alpha",
+      "subreadset": "/path/to/subreadset-01.xml",
+      "alignmentset": "/path/to/alignmentset-A.xml",
+      "referenceset": "/path/to/reference.xml"
+    },
+    {
+      "condId": "cond_alpha",
+      "subreadset": "/path/to/subreadset-02.xml",
+      "alignmentset": "/path/to/alignmentset-B.xml",
+      "referenceset": "/path/to/reference.xml"
+    },
+    {
+      "condId": "cond_beta",
+      "subreadset": "/path/to/subreadset-03.xml",
+      "alignmentset": "/path/to/alignmentset-C.xml",
+      "referenceset": "/path/to/reference.xml"
+    }
+  ],
+  "pipelineId": "pbsmrtpipe.pipelines.my_pipeline"
+}
\ No newline at end of file
diff --git a/tests/data/example-conditions/reseq-conditions-02.json b/tests/data/example-conditions/reseq-conditions-02.json
new file mode 100644
index 0000000..504034e
--- /dev/null
+++ b/tests/data/example-conditions/reseq-conditions-02.json
@@ -0,0 +1,24 @@
+{
+  "_condition_doc": "Example of a 'Resequencing' Condition Type with Files that have relative paths",
+  "conditions": [
+    {
+      "condId": "cond_alpha",
+      "subreadset": "subreadset-01.xml",
+      "alignmentset": "alignmentset-A.xml",
+      "referenceset": "reference.xml"
+    },
+    {
+      "condId": "cond_alpha",
+      "subreadset": "subreadset-02.xml",
+      "alignmentset": "alignmentset-B.xml",
+      "referenceset": "reference.xml"
+    },
+    {
+      "condId": "cond_beta",
+      "subreadset": "subreadset-03.xml",
+      "alignmentset": "alignmentset-C.xml",
+      "referenceset": "reference.xml"
+    }
+  ],
+  "pipelineId": "pbsmrtpipe.pipelines.my_pipeline"
+}
\ No newline at end of file
diff --git a/tests/data/example-reports/example_version_1_0_0.json b/tests/data/example-reports/example_version_1_0_0.json
new file mode 100644
index 0000000..82317c5
--- /dev/null
+++ b/tests/data/example-reports/example_version_1_0_0.json
@@ -0,0 +1,31 @@
+{
+  "_comment": "Example of v 1.0.0 Report schema.",
+  "tables": [],
+  "uuid": "196136c8-f6fd-11e5-b481-3c15c2cc8f88",
+  "version": "1.0.0",
+  "attributes": [
+    {
+      "name": "SMRT Cells",
+      "value": 1,
+      "id": "overview.ncells"
+    }
+  ],
+  "id": "my_example",
+  "title": "Example Report",
+  "plotGroups": [
+    {
+      "id": "adapter.observed_insert_length_distribution",
+      "thumbnail": "adapter_observed_insert_length_distribution_thumb.png",
+      "plots": [
+        {
+          "title": "My Plot",
+          "caption": null,
+          "image": "adapter_observed_insert_length_distribution.png",
+          "id": "adapter.observed_insert_length_distribution.plot1"
+        }
+      ],
+      "legend": null,
+      "title": "Observed Insert Length Distribution"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/tests/data/example-reports/example_with_plot.json b/tests/data/example-reports/example_with_plot.json
new file mode 100644
index 0000000..4c3ccc6
--- /dev/null
+++ b/tests/data/example-reports/example_with_plot.json
@@ -0,0 +1,24 @@
+{
+  "uuid": "2fcb60de-3b20-11e6-b559-3c15c2cc8f88",
+  "dataset_uuids": [],
+  "tables": [],
+  "version": "1.0.0",
+  "attributes": [],
+  "id": "adapter",
+  "plotGroups": [
+    {
+      "id": "adapter.observed_insert_length_distribution",
+      "thumbnail": "adapter_observed_insert_length_distribution_thumb.png",
+      "plots": [
+        {
+          "title": "My Plot",
+          "caption": null,
+          "image": "adapter_observed_insert_length_distribution.png",
+          "id": "adapter.observed_insert_length_distribution.plot1"
+        }
+      ],
+      "legend": null,
+      "title": "Observed Insert Length Distribution"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/tests/data/example-reports/overview.json b/tests/data/example-reports/overview.json
index c003db8..914970d 100644
--- a/tests/data/example-reports/overview.json
+++ b/tests/data/example-reports/overview.json
@@ -1,6 +1,7 @@
 {
   "tables": [],
-  "_comment": "Manually updated by MK for 0.3.9",
+  "_comment": "Manually updated by MK for 0.3.9 and Added UUID for 0.3.24",
+  "uuid": "196136c8-f6fd-11e5-b481-3c15c2cc8f88",
   "_version": "0.3.9",
   "_changelist": 127707,
   "attributes": [
diff --git a/tests/data/example-reports/test_report.json b/tests/data/example-reports/test_report.json
new file mode 100644
index 0000000..8c0cb60
--- /dev/null
+++ b/tests/data/example-reports/test_report.json
@@ -0,0 +1,40 @@
+{
+  "id": "test_report",
+  "version": "0.1",
+  "title": "Example report for comparing to specification",
+  "attributes": [
+    {
+      "id": "attribute1",
+      "name": "Attribute 1",
+      "value": 123456789
+    },
+    {
+      "id": "attribute2",
+      "name": "Attribute 2",
+      "value": 0.987654321
+    },
+    {
+      "id": "attribute3",
+      "name": "Attribute 3",
+      "value": true
+    },
+    {
+      "id": "attribute4",
+      "name": "Attribute 4",
+      "value": "qwerty"
+    }
+  ],
+  "tables": [
+    {
+      "id": "table1",
+      "title": "Table 1",
+      "columns": [
+        {
+          "id": "column1",
+          "header": "Column 1",
+          "values": [1,2,3,4,5,6]
+        }
+      ]
+    }
+  ]
+}
diff --git a/tests/data/example-reports/test_report2.json b/tests/data/example-reports/test_report2.json
new file mode 100644
index 0000000..bb61c34
--- /dev/null
+++ b/tests/data/example-reports/test_report2.json
@@ -0,0 +1,72 @@
+{
+  "id": "test_report",
+  "version": "1.0.0",
+  "title": null,
+  "attributes": [
+    {
+      "id": "test_report.attribute1",
+      "name": null,
+      "value": 123456789
+    },
+    {
+      "id": "test_report.attribute2",
+      "name": null,
+      "value": 0.987654321
+    },
+    {
+      "id": "test_report.attribute3",
+      "name": null,
+      "value": true
+    },
+    {
+      "id": "test_report.attribute4",
+      "name": null,
+      "value": "qwerty"
+    }
+  ],
+  "tables": [
+    {
+      "id": "test_report.table1",
+      "title": null,
+      "columns": [
+        {
+          "id": "test_report.table1.column1",
+          "header": null,
+          "values": [1,2,3,4,5,6]
+        }
+      ]
+    }
+  ],
+  "plotGroups": [
+    {
+      "id": "test_report.plotgroup1",
+      "title": null,
+      "plots": [
+        {
+          "id": "test_report.plotgroup1.plot1",
+          "image": "unknown.png",
+          "title": null,
+          "caption": null
+        }
+      ]
+    },
+    {
+      "id": "test_report.plotgroup2",
+      "title": null,
+      "plots": [
+        {
+          "id": "test_report.plotgroup2.plot1",
+          "image": "unknown.png",
+          "title": null,
+          "caption": null
+        },
+        {
+          "id": "test_report.plotgroup2.plot2",
+          "image": "unknown.png",
+          "title": null,
+          "caption": null
+        }
+      ]
+    }
+  ]
+}
diff --git a/tests/data/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json b/tests/data/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json
deleted file mode 100644
index add091c..0000000
--- a/tests/data/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
-    "driver": {
-        "env": {}, 
-        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ", 
-        "serialization": "json"
-    }, 
-    "tool_contract": {
-        "_comment": "Created by v0.2.14", 
-        "description": "Quick tool dev_txt_custom_outs pbcommand.tasks.dev_txt_custom_outs", 
-        "input_types": [
-            {
-                "description": "description for PacBio.FileTypes.txt_0", 
-                "file_type_id": "PacBio.FileTypes.txt", 
-                "id": "Label PacBio.FileTypes.txt_0", 
-                "title": "<FileType id=PacBio.FileTypes.txt name=file.txt >"
-            }
-        ], 
-        "is_distributed": true, 
-        "name": "Tool dev_txt_custom_outs", 
-        "nproc": 1, 
-        "output_types": [
-            {
-                "default_name": "PacBio.FileTypes.txt_file_0", 
-                "description": "File <FileType id=PacBio.FileTypes.txt name=file.txt >", 
-                "file_type_id": "PacBio.FileTypes.txt", 
-                "id": "label_PacBio.FileTypes.txt", 
-                "title": "<FileType id=PacBio.FileTypes.txt name=file.txt >"
-            }, 
-            {
-                "default_name": "PacBio.FileTypes.txt_file_1", 
-                "description": "File <FileType id=PacBio.FileTypes.txt name=file.txt >", 
-                "file_type_id": "PacBio.FileTypes.txt", 
-                "id": "label_PacBio.FileTypes.txt", 
-                "title": "<FileType id=PacBio.FileTypes.txt name=file.txt >"
-            }
-        ], 
-        "resource_types": [], 
-        "schema_options": [], 
-        "task_type": "pbsmrtpipe.task_types.standard", 
-        "tool_contract_id": "pbcommand.tasks.dev_txt_custom_outs"
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_txt_custom_outs", 
-    "version": "0.1.0"
-}
diff --git a/tests/data/pbcommand.tasks.dev_txt_hello_tool_contract.json b/tests/data/pbcommand.tasks.dev_txt_hello_tool_contract.json
deleted file mode 100644
index 6e94ae7..0000000
--- a/tests/data/pbcommand.tasks.dev_txt_hello_tool_contract.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
-    "driver": {
-        "env": {}, 
-        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ", 
-        "serialization": "json"
-    }, 
-    "tool_contract": {
-        "_comment": "Created by v0.2.14", 
-        "description": "Quick tool dev_txt_hello pbcommand.tasks.dev_txt_hello", 
-        "input_types": [
-            {
-                "description": "description for PacBio.FileTypes.txt_0", 
-                "file_type_id": "PacBio.FileTypes.txt", 
-                "id": "Label PacBio.FileTypes.txt_0", 
-                "title": "<FileType id=PacBio.FileTypes.txt name=file.txt >"
-            }
-        ], 
-        "is_distributed": false, 
-        "name": "Tool dev_txt_hello", 
-        "nproc": 3, 
-        "output_types": [
-            {
-                "default_name": "file", 
-                "description": "description for <FileType id=PacBio.FileTypes.txt name=file.txt >", 
-                "file_type_id": "PacBio.FileTypes.txt", 
-                "id": "Label PacBio.FileTypes.txt_0", 
-                "title": "<FileType id=PacBio.FileTypes.txt name=file.txt >"
-            }, 
-            {
-                "default_name": "file", 
-                "description": "description for <FileType id=PacBio.FileTypes.txt name=file.txt >", 
-                "file_type_id": "PacBio.FileTypes.txt", 
-                "id": "Label PacBio.FileTypes.txt_1", 
-                "title": "<FileType id=PacBio.FileTypes.txt name=file.txt >"
-            }
-        ], 
-        "resource_types": [], 
-        "schema_options": [], 
-        "task_type": "pbsmrtpipe.task_types.standard", 
-        "tool_contract_id": "pbcommand.tasks.dev_txt_hello"
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_txt_hello", 
-    "version": "0.1.0"
-}
diff --git a/tests/data/pipeline-datastore-view-rules/rules_01.json b/tests/data/pipeline-datastore-view-rules/rules_01.json
new file mode 100644
index 0000000..61d4742
--- /dev/null
+++ b/tests/data/pipeline-datastore-view-rules/rules_01.json
@@ -0,0 +1,20 @@
+{
+  "pipelineId": "pbsmrtpipe.pipelines.sa3_sat",
+  "smrtlinkVersion": "3.2",
+  "rules": [
+    {
+      "sourceId": "pbreports.tasks.sat_report-out-0",
+      "fileTypeId": "PacBio.FileTypes.JsonReport",
+      "isHidden": true,
+      "name": "Site Acceptance Test",
+      "description": "JSON report for PacBio site acceptance test"
+    },
+    {
+      "sourceId": "pbreports.tasks.top_variants-out-0",
+      "fileTypeId": "PacBio.FileTypes.JsonReport",
+      "isHidden": true,
+      "name": null,
+      "description": null
+    }
+  ]
+}
diff --git a/tests/data/pipeline-presets/example-pipeline-presets.json b/tests/data/pipeline-presets/example-pipeline-presets.json
new file mode 100644
index 0000000..3122694
--- /dev/null
+++ b/tests/data/pipeline-presets/example-pipeline-presets.json
@@ -0,0 +1,17 @@
+{
+  "_comment": "Resolved Pipeline Template Preset JSON format",
+  "pipelineId": "pbsmrtpipe.pipelines.dev_a",
+  "presetId": "pbsmrtpipe.pipeline_preset.settings_01",
+  "name": "Pipeline Template Preset Name",
+  "description": "Description of preset is required",
+  "options": {
+    "pbsmrtpipe.options.max_nchunks": 10,
+    "pbsmrtpipe.options.chunk_mode": true
+  },
+  "taskOptions": {
+    "pbcommand.task_options.num_records": 51,
+    "pbcommand.task_options.alpha": 1.234,
+    "pbcommand.task_options.beta": false,
+    "pbcommand.task_options.gamma": "this is a string parameter"
+  }
+}
diff --git a/tests/data/report-specs/report_spec.json b/tests/data/report-specs/report_spec.json
new file mode 100644
index 0000000..4521810
--- /dev/null
+++ b/tests/data/report-specs/report_spec.json
@@ -0,0 +1,114 @@
+{
+    "id": "test_report",
+    "version": "0.1",
+    "title": "Example report spec",
+    "description": "This is a small test report which is used to ensure report_spec.py is working",
+    "attributes": [
+        {
+            "description": "An attribute of type int",
+            "type": "int",
+            "id": "attribute1",
+            "name": "Attribute 1",
+            "format": "{:,d}"
+        },
+        {
+            "description": "An attribute of type float",
+            "type": "float",
+            "id": "attribute2",
+            "name": "Attribute 2",
+            "format": "{p:5g} %"
+        },
+        {
+            "description": "An attribute of type bool",
+            "type": "boolean",
+            "id": "attribute3",
+            "name": "Attribute 3"
+        },
+        {
+            "description": "An attribute of type string",
+            "type": "string",
+            "id": "attribute4",
+            "name": "Attribute 4",
+            "format": null
+        }
+    ],
+    "tables": [
+        {
+            "id": "table1",
+            "title": "Table 1",
+            "description": "The first table",
+            "columns": [
+                {
+                    "header": "Column 1",
+                    "type": "int",
+                    "id": "column1",
+                    "description": "A column of type int",
+                    "format": "{:d}"
+                }
+            ]
+        },
+        {
+            "id": "table2",
+            "title": "Table 2",
+            "description": "The second table",
+            "columns": [
+                {
+                    "header": "Column 1",
+                    "type": "float",
+                    "id": "column1",
+                    "description": "A column of type float",
+                    "format": "{:.2f}"
+                },
+                {
+                    "header": "Column 2",
+                    "type": "string",
+                    "id": "column2",
+                    "description": "A column of type str",
+                    "format": null
+                }
+            ]
+        }
+    ],
+    "plotGroups": [
+        {
+            "plots": [
+                {
+                    "description": "The first plot of the first plotgroup",
+                    "title": "Plot 1",
+                    "caption": "Plot 1",
+                    "xlabel": "x variable",
+                    "ylabel": "y variable",
+                    "id": "plot1"
+                }
+            ],
+            "description": "The first plotgroup",
+            "legend": "legend1.png",
+            "id": "plotgroup1",
+            "title": "Plotgroup 1"
+        },
+        {
+            "plots": [
+                {
+                    "description": "The first plot of the second plotgroup",
+                    "title": "Plot 1",
+                    "caption": "Plot 1",
+                    "xlabel": "x variable",
+                    "ylabel": "y variable",
+                    "id": "plot1"
+                },
+                {
+                    "description": "The second plot of the second plotgroup",
+                    "title": "Plot 2",
+                    "caption": "Plot 2",
+                    "xlabel": "x variable",
+                    "ylabel": "y variable",
+                    "id": "plot2"
+                }
+            ],
+            "description": "The second plotgroup",
+            "legend": "legend2.png",
+            "id": "plotgroup2",
+            "title": "Plotgroup 2"
+        }
+    ]
+}
diff --git a/tests/data/dev_example_resolved_tool_contract.json b/tests/data/resolved-tool-contracts/dev_example_resolved_tool_contract.json
similarity index 100%
rename from tests/data/dev_example_resolved_tool_contract.json
rename to tests/data/resolved-tool-contracts/dev_example_resolved_tool_contract.json
diff --git a/tests/data/resolved-tool-contracts/dev_mixed_app_resolved_tool_contract.json b/tests/data/resolved-tool-contracts/dev_mixed_app_resolved_tool_contract.json
new file mode 100644
index 0000000..a10c07b
--- /dev/null
+++ b/tests/data/resolved-tool-contracts/dev_mixed_app_resolved_tool_contract.json
@@ -0,0 +1,28 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_mixed_app --resolved-tool-contract ",
+        "serialization": "json"
+    },
+    "resolved_tool_contract": {
+        "_comment": "Created by pbcommand v0.5.0",
+        "input_files": [
+            "tests/data/example.txt"
+        ],
+        "is_distributed": false,
+        "log_level": "INFO",
+        "nproc": 1,
+        "options": {
+            "pbcommand.task_options.alpha": 50,
+            "pbcommand.task_options.beta": 9.876,
+            "pbcommand.task_options.gamma": false,
+            "pbcommand.task_options.ploidy": "diploid"
+        },
+        "output_files": [
+            "example.report.json"
+        ],
+        "resources": [],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_mixed_app"
+    }
+}
diff --git a/tests/data/resolved_contract_01.json b/tests/data/resolved-tool-contracts/resolved_contract_01.json
similarity index 100%
rename from tests/data/resolved_contract_01.json
rename to tests/data/resolved-tool-contracts/resolved_contract_01.json
diff --git a/tests/data/resolved_tool_contract_dev_app.json b/tests/data/resolved-tool-contracts/resolved_tool_contract_dev_app.json
similarity index 100%
rename from tests/data/resolved_tool_contract_dev_app.json
rename to tests/data/resolved-tool-contracts/resolved_tool_contract_dev_app.json
diff --git a/tests/data/dev_example_dev_txt_app_tool_contract.json b/tests/data/tool-contracts-v1/dev_example_dev_txt_app_tool_contract.json
similarity index 52%
rename from tests/data/dev_example_dev_txt_app_tool_contract.json
rename to tests/data/tool-contracts-v1/dev_example_dev_txt_app_tool_contract.json
index 6620f43..41ef959 100644
--- a/tests/data/dev_example_dev_txt_app_tool_contract.json
+++ b/tests/data/tool-contracts-v1/dev_example_dev_txt_app_tool_contract.json
@@ -1,65 +1,65 @@
 {
-    "version": "0.1.0", 
+    "version": "0.1.0",
     "driver": {
-        "serialization": "json", 
-        "exe": "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract ", 
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract ",
         "env": {}
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_txt_app", 
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_txt_app",
     "tool_contract": {
-        "task_type": "pbsmrtpipe.task_types.standard", 
+        "task_type": "pbsmrtpipe.task_types.standard",
         "resource_types": [
-            "$tmpfile", 
-            "$tmpfile", 
+            "$tmpfile",
+            "$tmpfile",
             "$tmpdir"
-        ], 
-        "description": "Dev app for Testing that supports emitting tool contracts", 
+        ],
+        "description": "Dev app for Testing that supports emitting tool contracts",
         "schema_options": [
             {
                 "pb_option": {
-                    "default": 10, 
-                    "type": "integer", 
-                    "option_id": "pbcommand.task_options.dev_max_nlines", 
-                    "name": "Max Lines", 
+                    "default": 10,
+                    "type": "integer",
+                    "option_id": "pbcommand.task_options.dev_max_nlines",
+                    "name": "Max Lines",
                     "description": "Max Number of lines to Copy"
-                }, 
-                "title": "JSON Schema for pbcommand.task_options.dev_max_nlines", 
+                },
+                "title": "JSON Schema for pbcommand.task_options.dev_max_nlines",
                 "required": [
                     "pbcommand.task_options.dev_max_nlines"
-                ], 
-                "$schema": "http://json-schema.org/draft-04/schema#", 
-                "type": "object", 
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
                 "properties": {
                     "pbcommand.task_options.dev_max_nlines": {
-                        "default": 10, 
-                        "type": "integer", 
-                        "description": "Max Number of lines to Copy", 
+                        "default": 10,
+                        "type": "integer",
+                        "description": "Max Number of lines to Copy",
                         "title": "Max Lines"
                     }
                 }
             }
-        ], 
+        ],
         "output_types": [
             {
-                "title": "Txt outfile", 
-                "description": "Generic Output Txt file", 
-                "default_name": "output", 
-                "id": "txt_out", 
+                "title": "Txt outfile",
+                "description": "Generic Output Txt file",
+                "default_name": "output",
+                "id": "txt_out",
                 "file_type_id": "PacBio.FileTypes.txt"
             }
-        ], 
-        "_comment": "Created by v0.2.14", 
-        "name": "Txt App", 
+        ],
+        "_comment": "Created by v0.4.9",
+        "name": "Txt App",
         "input_types": [
             {
-                "description": "Generic Text File", 
-                "title": "Txt file", 
-                "id": "txt_in", 
+                "description": "Generic Text File",
+                "title": "Txt file",
+                "id": "txt_in",
                 "file_type_id": "PacBio.FileTypes.txt"
             }
-        ], 
-        "nproc": 1, 
-        "is_distributed": false, 
+        ],
+        "nproc": 1,
+        "is_distributed": false,
         "tool_contract_id": "pbcommand.tasks.dev_txt_app"
     }
 }
diff --git a/tests/data/dev_example_tool_contract.json b/tests/data/tool-contracts-v1/dev_example_tool_contract.json
similarity index 52%
copy from tests/data/dev_example_tool_contract.json
copy to tests/data/tool-contracts-v1/dev_example_tool_contract.json
index e151daf..5035493 100644
--- a/tests/data/dev_example_tool_contract.json
+++ b/tests/data/tool-contracts-v1/dev_example_tool_contract.json
@@ -1,65 +1,65 @@
 {
-    "version": "0.2.1", 
+    "version": "0.2.1",
     "driver": {
-        "serialization": "json", 
-        "exe": "python -m pbcommand.cli.example.dev_app --resolved-tool-contract ", 
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.example.dev_app --resolved-tool-contract ",
         "env": {}
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_app", 
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_app",
     "tool_contract": {
-        "task_type": "pbsmrtpipe.task_types.standard", 
+        "task_type": "pbsmrtpipe.task_types.standard",
         "resource_types": [
-            "$tmpfile", 
-            "$tmpfile", 
+            "$tmpfile",
+            "$tmpfile",
             "$tmpdir"
-        ], 
-        "description": "Dev app for Testing that supports emitting tool contracts", 
+        ],
+        "description": "Dev app for Testing that supports emitting tool contracts",
         "schema_options": [
             {
                 "pb_option": {
-                    "default": 25, 
-                    "type": "integer", 
-                    "option_id": "pbcommand.task_options.dev_read_length", 
-                    "name": "Length filter", 
+                    "default": 25,
+                    "type": "integer",
+                    "option_id": "pbcommand.task_options.dev_read_length",
+                    "name": "Length filter",
                     "description": "Min Sequence Length filter"
-                }, 
-                "title": "JSON Schema for pbcommand.task_options.dev_read_length", 
+                },
+                "title": "JSON Schema for pbcommand.task_options.dev_read_length",
                 "required": [
                     "pbcommand.task_options.dev_read_length"
-                ], 
-                "$schema": "http://json-schema.org/draft-04/schema#", 
-                "type": "object", 
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
                 "properties": {
                     "pbcommand.task_options.dev_read_length": {
-                        "default": 25, 
-                        "type": "integer", 
-                        "description": "Min Sequence Length filter", 
+                        "default": 25,
+                        "type": "integer",
+                        "description": "Min Sequence Length filter",
                         "title": "Length filter"
                     }
                 }
             }
-        ], 
+        ],
         "output_types": [
             {
-                "title": "Filtered Fasta file", 
-                "description": "Filtered Fasta file", 
-                "default_name": "filter", 
-                "id": "fasta_out", 
+                "title": "Filtered Fasta file",
+                "description": "Filtered Fasta file",
+                "default_name": "filter",
+                "id": "fasta_out",
                 "file_type_id": "PacBio.FileTypes.Fasta"
             }
-        ], 
-        "_comment": "Created by v0.2.14", 
-        "name": "Example Dev App", 
+        ],
+        "_comment": "Created by v0.4.4",
+        "name": "Example Dev App",
         "input_types": [
             {
-                "description": "PacBio Spec'ed fasta file", 
-                "title": "Fasta File", 
-                "id": "fasta_in", 
+                "description": "PacBio Spec'ed fasta file",
+                "title": "Fasta File",
+                "id": "fasta_in",
                 "file_type_id": "PacBio.FileTypes.Fasta"
             }
-        ], 
-        "nproc": 1, 
-        "is_distributed": false, 
+        ],
+        "nproc": 1,
+        "is_distributed": false,
         "tool_contract_id": "pbcommand.tasks.dev_app"
     }
 }
diff --git a/tests/data/tool-contracts-v1/dev_gather_fasta_app_tool_contract.json b/tests/data/tool-contracts-v1/dev_gather_fasta_app_tool_contract.json
new file mode 100644
index 0000000..f1fec50
--- /dev/null
+++ b/tests/data/tool-contracts-v1/dev_gather_fasta_app_tool_contract.json
@@ -0,0 +1,37 @@
+{
+    "version": "0.1.0",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_gather_fasta",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.gathered",
+        "resource_types": [],
+        "description": "Gather a fasta resources in a Chunk.json file",
+        "schema_options": [],
+        "output_types": [
+            {
+                "title": "Chunk JSON",
+                "description": "Output Fasta",
+                "default_name": "gathered",
+                "id": "output",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "_comment": "Created by v0.4.9",
+        "name": "Fasta Chunk Gather",
+        "input_types": [
+            {
+                "description": "Chunked Fasta JSON Out",
+                "title": "Chunk JSON",
+                "id": "chunk_json",
+                "file_type_id": "PacBio.FileTypes.CHUNK"
+            }
+        ],
+        "nproc": 1,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_gather_fasta"
+    }
+}
diff --git a/tests/data/tool-contracts-v1/dev_mixed_app_tool_contract.json b/tests/data/tool-contracts-v1/dev_mixed_app_tool_contract.json
new file mode 100644
index 0000000..608f35d
--- /dev/null
+++ b/tests/data/tool-contracts-v1/dev_mixed_app_tool_contract.json
@@ -0,0 +1,224 @@
+{
+    "version": "0.2.0",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_mixed_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_mixed_app",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "resource_types": [],
+        "description": "Dev app for Testing that supports emitting tool contracts",
+        "schema_options": [
+            {
+                "pb_option": {
+                    "name": "Alpha",
+                    "default": 25,
+                    "option_id": "pbcommand.task_options.alpha",
+                    "choices": null,
+                    "optionTypeId": "pbsmrtpipe.option_types.integer",
+                    "type": "integer",
+                    "description": "Alpha description"
+                },
+                "title": "JSON Schema for pbcommand.task_options.alpha",
+                "required": [
+                    "pbcommand.task_options.alpha"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.alpha": {
+                        "default": 25,
+                        "type": "integer",
+                        "description": "Alpha description",
+                        "title": "Alpha"
+                    }
+                }
+            },
+            {
+                "pb_option": {
+                    "name": "Beta",
+                    "default": 1.234,
+                    "option_id": "pbcommand.task_options.beta",
+                    "choices": null,
+                    "optionTypeId": "pbsmrtpipe.option_types.float",
+                    "type": "number",
+                    "description": "Beta description"
+                },
+                "title": "JSON Schema for pbcommand.task_options.beta",
+                "required": [
+                    "pbcommand.task_options.beta"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.beta": {
+                        "default": 1.234,
+                        "type": "number",
+                        "description": "Beta description",
+                        "title": "Beta"
+                    }
+                }
+            },
+            {
+                "pb_option": {
+                    "name": "Gamma",
+                    "default": true,
+                    "option_id": "pbcommand.task_options.gamma",
+                    "choices": null,
+                    "optionTypeId": "pbsmrtpipe.option_types.boolean",
+                    "type": "boolean",
+                    "description": "Gamma description"
+                },
+                "title": "JSON Schema for pbcommand.task_options.gamma",
+                "required": [
+                    "pbcommand.task_options.gamma"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.gamma": {
+                        "default": true,
+                        "type": "boolean",
+                        "description": "Gamma description",
+                        "title": "Gamma"
+                    }
+                }
+            },
+            {
+                "pb_option": {
+                    "name": "Ploidy",
+                    "default": "haploid",
+                    "option_id": "pbcommand.task_options.ploidy",
+                    "choices": [
+                        "haploid",
+                        "diploid"
+                    ],
+                    "optionTypeId": "pbsmrtpipe.option_types.string",
+                    "type": "string",
+                    "description": "Genome ploidy"
+                },
+                "title": "JSON Schema for pbcommand.task_options.ploidy",
+                "required": [
+                    "pbcommand.task_options.ploidy"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.ploidy": {
+                        "default": "haploid",
+                        "type": "string",
+                        "description": "Genome ploidy",
+                        "title": "Ploidy"
+                    }
+                }
+            },
+            {
+                "pb_option": {
+                    "name": "Delta",
+                    "default": 1,
+                    "option_id": "pbcommand.task_options.delta",
+                    "choices": [
+                        1,
+                        2,
+                        3
+                    ],
+                    "optionTypeId": "pbsmrtpipe.option_types.choice_int",
+                    "type": "integer",
+                    "description": "An integer choice"
+                },
+                "title": "JSON Schema for pbcommand.task_options.delta",
+                "required": [
+                    "pbcommand.task_options.delta"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.delta": {
+                        "default": 1,
+                        "type": "integer",
+                        "description": "An integer choice",
+                        "title": "Delta"
+                    }
+                }
+            },
+            {
+                "pb_option": {
+                    "name": "Epsilon",
+                    "default": 0.1,
+                    "option_id": "pbcommand.task_options.epsilon",
+                    "choices": [
+                        0.01,
+                        0.1,
+                        1.0
+                    ],
+                    "optionTypeId": "pbsmrtpipe.option_types.choice_float",
+                    "type": "number",
+                    "description": "A float choice"
+                },
+                "title": "JSON Schema for pbcommand.task_options.epsilon",
+                "required": [
+                    "pbcommand.task_options.epsilon"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.epsilon": {
+                        "default": 0.1,
+                        "type": "number",
+                        "description": "A float choice",
+                        "title": "Epsilon"
+                    }
+                }
+            },
+            {
+                "pb_option": {
+                    "name": "Comments",
+                    "default": "asdf",
+                    "option_id": "pbcommand.task_options.comment",
+                    "choices": null,
+                    "optionTypeId": "pbsmrtpipe.option_types.string",
+                    "type": "string",
+                    "description": "A string parameter"
+                },
+                "title": "JSON Schema for pbcommand.task_options.comment",
+                "required": [
+                    "pbcommand.task_options.comment"
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
+                "properties": {
+                    "pbcommand.task_options.comment": {
+                        "default": "asdf",
+                        "type": "string",
+                        "description": "A string parameter",
+                        "title": "Comments"
+                    }
+                }
+            }
+        ],
+        "output_types": [
+            {
+                "title": "Output Report",
+                "description": "Output PacBio Report JSON",
+                "default_name": "example.report",
+                "id": "rpt",
+                "file_type_id": "PacBio.FileTypes.JsonReport"
+            }
+        ],
+        "_comment": "Created by v0.5.0",
+        "name": "DevApp",
+        "input_types": [
+            {
+                "description": "Input csv description",
+                "title": "Input CSV",
+                "id": "csv",
+                "file_type_id": "PacBio.FileTypes.csv"
+            }
+        ],
+        "nproc": 2,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_mixed_app"
+    }
+}
diff --git a/tests/data/dev_scatter_fasta_app_tool_contract.json b/tests/data/tool-contracts-v1/dev_scatter_fasta_app_tool_contract.json
similarity index 54%
rename from tests/data/dev_scatter_fasta_app_tool_contract.json
rename to tests/data/tool-contracts-v1/dev_scatter_fasta_app_tool_contract.json
index c21571c..f92c653 100644
--- a/tests/data/dev_scatter_fasta_app_tool_contract.json
+++ b/tests/data/tool-contracts-v1/dev_scatter_fasta_app_tool_contract.json
@@ -1,65 +1,65 @@
 {
-    "version": "0.1.0", 
+    "version": "0.1.0",
     "driver": {
-        "serialization": "json", 
-        "exe": "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract ", 
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract ",
         "env": {}
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_scatter_fasta", 
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_scatter_fasta",
     "tool_contract": {
-        "task_type": "pbsmrtpipe.task_types.scattered", 
-        "resource_types": [], 
-        "description": "Scatter a single fasta file to create chunk.json file", 
+        "task_type": "pbsmrtpipe.task_types.scattered",
+        "resource_types": [],
+        "description": "Scatter a single fasta file to create chunk.json file",
         "schema_options": [
             {
                 "pb_option": {
-                    "default": 10, 
-                    "type": "integer", 
-                    "option_id": "pbcommand.task_options.dev_scatter_fa_nchunks", 
-                    "name": "Number of chunks", 
+                    "default": 10,
+                    "type": "integer",
+                    "option_id": "pbcommand.task_options.dev_scatter_fa_nchunks",
+                    "name": "Number of chunks",
                     "description": "Suggested number of chunks. May be overridden by $max_nchunks"
-                }, 
-                "title": "JSON Schema for pbcommand.task_options.dev_scatter_fa_nchunks", 
+                },
+                "title": "JSON Schema for pbcommand.task_options.dev_scatter_fa_nchunks",
                 "required": [
                     "pbcommand.task_options.dev_scatter_fa_nchunks"
-                ], 
-                "$schema": "http://json-schema.org/draft-04/schema#", 
-                "type": "object", 
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
                 "properties": {
                     "pbcommand.task_options.dev_scatter_fa_nchunks": {
-                        "default": 10, 
-                        "type": "integer", 
-                        "description": "Suggested number of chunks. May be overridden by $max_nchunks", 
+                        "default": 10,
+                        "type": "integer",
+                        "description": "Suggested number of chunks. May be overridden by $max_nchunks",
                         "title": "Number of chunks"
                     }
                 }
             }
-        ], 
+        ],
         "output_types": [
             {
-                "title": "Chunk JSON", 
-                "description": "Scattered/Chunked Fasta Chunk.json", 
-                "default_name": "fasta.chunks", 
-                "id": "cjson", 
+                "title": "Chunk JSON",
+                "description": "Scattered/Chunked Fasta Chunk.json",
+                "default_name": "fasta.chunks",
+                "id": "cjson",
                 "file_type_id": "PacBio.FileTypes.CHUNK"
             }
-        ], 
-        "_comment": "Created by v0.2.14", 
-        "nchunks": "$max_nchunks", 
-        "name": "Fasta Scatter", 
+        ],
+        "_comment": "Created by v0.4.9",
+        "nchunks": "$max_nchunks",
+        "name": "Fasta Scatter",
         "input_types": [
             {
-                "description": "Fasta file to scatter", 
-                "title": "Fasta In", 
-                "id": "fasta_in", 
+                "description": "Fasta file to scatter",
+                "title": "Fasta In",
+                "id": "fasta_in",
                 "file_type_id": "PacBio.FileTypes.Fasta"
             }
-        ], 
+        ],
         "chunk_keys": [
             "$chunk.fasta_id"
-        ], 
-        "nproc": 1, 
-        "is_distributed": false, 
+        ],
+        "nproc": 1,
+        "is_distributed": false,
         "tool_contract_id": "pbcommand.tasks.dev_scatter_fasta"
     }
 }
diff --git a/tests/data/dev_example_tool_contract.json b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_app_tool_contract.json
similarity index 52%
rename from tests/data/dev_example_tool_contract.json
rename to tests/data/tool-contracts-v1/pbcommand.tasks.dev_app_tool_contract.json
index e151daf..ff4fbbe 100644
--- a/tests/data/dev_example_tool_contract.json
+++ b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_app_tool_contract.json
@@ -1,65 +1,65 @@
 {
-    "version": "0.2.1", 
+    "version": "0.2.1",
     "driver": {
-        "serialization": "json", 
-        "exe": "python -m pbcommand.cli.example.dev_app --resolved-tool-contract ", 
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.example.dev_app --resolved-tool-contract ",
         "env": {}
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_app", 
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_app",
     "tool_contract": {
-        "task_type": "pbsmrtpipe.task_types.standard", 
+        "task_type": "pbsmrtpipe.task_types.standard",
         "resource_types": [
-            "$tmpfile", 
-            "$tmpfile", 
+            "$tmpfile",
+            "$tmpfile",
             "$tmpdir"
-        ], 
-        "description": "Dev app for Testing that supports emitting tool contracts", 
+        ],
+        "description": "Dev app for Testing that supports emitting tool contracts",
         "schema_options": [
             {
                 "pb_option": {
-                    "default": 25, 
-                    "type": "integer", 
-                    "option_id": "pbcommand.task_options.dev_read_length", 
-                    "name": "Length filter", 
+                    "default": 25,
+                    "type": "integer",
+                    "option_id": "pbcommand.task_options.dev_read_length",
+                    "name": "Length filter",
                     "description": "Min Sequence Length filter"
-                }, 
-                "title": "JSON Schema for pbcommand.task_options.dev_read_length", 
+                },
+                "title": "JSON Schema for pbcommand.task_options.dev_read_length",
                 "required": [
                     "pbcommand.task_options.dev_read_length"
-                ], 
-                "$schema": "http://json-schema.org/draft-04/schema#", 
-                "type": "object", 
+                ],
+                "$schema": "http://json-schema.org/draft-04/schema#",
+                "type": "object",
                 "properties": {
                     "pbcommand.task_options.dev_read_length": {
-                        "default": 25, 
-                        "type": "integer", 
-                        "description": "Min Sequence Length filter", 
+                        "default": 25,
+                        "type": "integer",
+                        "description": "Min Sequence Length filter",
                         "title": "Length filter"
                     }
                 }
             }
-        ], 
+        ],
         "output_types": [
             {
-                "title": "Filtered Fasta file", 
-                "description": "Filtered Fasta file", 
-                "default_name": "filter", 
-                "id": "fasta_out", 
+                "title": "Filtered Fasta file",
+                "description": "Filtered Fasta file",
+                "default_name": "filter",
+                "id": "fasta_out",
                 "file_type_id": "PacBio.FileTypes.Fasta"
             }
-        ], 
-        "_comment": "Created by v0.2.14", 
-        "name": "Example Dev App", 
+        ],
+        "_comment": "Created by v0.4.9",
+        "name": "Example Dev App",
         "input_types": [
             {
-                "description": "PacBio Spec'ed fasta file", 
-                "title": "Fasta File", 
-                "id": "fasta_in", 
+                "description": "PacBio Spec'ed fasta file",
+                "title": "Fasta File",
+                "id": "fasta_in",
                 "file_type_id": "PacBio.FileTypes.Fasta"
             }
-        ], 
-        "nproc": 1, 
-        "is_distributed": false, 
+        ],
+        "nproc": 1,
+        "is_distributed": false,
         "tool_contract_id": "pbcommand.tasks.dev_app"
     }
 }
diff --git a/tests/data/pbcommand.tasks.dev_fastq2fasta_tool_contract.json b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
similarity index 51%
rename from tests/data/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
rename to tests/data/tool-contracts-v1/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
index 3e9ba28..832df17 100644
--- a/tests/data/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
+++ b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
@@ -1,84 +1,84 @@
 {
     "driver": {
-        "env": {}, 
-        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ", 
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
         "serialization": "json"
-    }, 
+    },
     "tool_contract": {
-        "_comment": "Created by v0.3.5", 
-        "description": "Quick tool dev_fastq2fasta pbcommand.tasks.dev_fastq2fasta", 
+        "_comment": "Created by v0.4.9",
+        "description": "Dev Task Fastq to Fasta Example",
         "input_types": [
             {
-                "description": "description for PacBio.FileTypes.Fastq_0", 
-                "file_type_id": "PacBio.FileTypes.Fastq", 
-                "id": "Label PacBio.FileTypes.Fastq_0", 
-                "title": "<FileType id=PacBio.FileTypes.Fastq name=file.fastq >"
+                "description": "description for PacBio.FileTypes.Fastq_0",
+                "file_type_id": "PacBio.FileTypes.Fastq",
+                "id": "Label PacBio.FileTypes.Fastq_0",
+                "title": "<FileType id=PacBio.FileTypes.Fastq name=file >"
             }
-        ], 
-        "is_distributed": true, 
-        "name": "Tool dev_fastq2fasta", 
-        "nproc": 1, 
+        ],
+        "is_distributed": true,
+        "name": "Fastq to Fasta",
+        "nproc": 1,
         "output_types": [
             {
-                "default_name": "file", 
-                "description": "description for <FileType id=PacBio.FileTypes.Fasta name=file.fasta >", 
-                "file_type_id": "PacBio.FileTypes.Fasta", 
-                "id": "Label PacBio.FileTypes.Fasta_0", 
-                "title": "<FileType id=PacBio.FileTypes.Fasta name=file.fasta >"
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.Fasta name=file >",
+                "file_type_id": "PacBio.FileTypes.Fasta",
+                "id": "Label PacBio.FileTypes.Fasta_0",
+                "title": "<FileType id=PacBio.FileTypes.Fasta name=file >"
             }
-        ], 
-        "resource_types": [], 
+        ],
+        "resource_types": [],
         "schema_options": [
             {
-                "$schema": "http://json-schema.org/draft-04/schema#", 
+                "$schema": "http://json-schema.org/draft-04/schema#",
                 "pb_option": {
-                    "default": 1234.0, 
-                    "description": "Beta Description", 
-                    "name": "Beta Name", 
-                    "option_id": "pbcommand.task_options.beta", 
+                    "default": 1234.0,
+                    "description": "Beta Description",
+                    "name": "Beta Name",
+                    "option_id": "pbcommand.task_options.beta",
                     "type": "number"
-                }, 
+                },
                 "properties": {
                     "pbcommand.task_options.beta": {
-                        "default": 1234.0, 
-                        "description": "Beta Description", 
-                        "title": "Beta Name", 
+                        "default": 1234.0,
+                        "description": "Beta Description",
+                        "title": "Beta Name",
                         "type": "number"
                     }
-                }, 
+                },
                 "required": [
                     "pbcommand.task_options.beta"
-                ], 
-                "title": "JSON Schema for pbcommand.task_options.beta", 
+                ],
+                "title": "JSON Schema for pbcommand.task_options.beta",
                 "type": "object"
-            }, 
+            },
             {
-                "$schema": "http://json-schema.org/draft-04/schema#", 
+                "$schema": "http://json-schema.org/draft-04/schema#",
                 "pb_option": {
-                    "default": true, 
-                    "description": "Option gamma description", 
-                    "name": "Option gamma", 
-                    "option_id": "pbcommand.task_options.gamma", 
+                    "default": true,
+                    "description": "Option gamma description",
+                    "name": "Option gamma",
+                    "option_id": "pbcommand.task_options.gamma",
                     "type": "boolean"
-                }, 
+                },
                 "properties": {
                     "pbcommand.task_options.gamma": {
-                        "default": true, 
-                        "description": "Option gamma description", 
-                        "title": "Option gamma", 
+                        "default": true,
+                        "description": "Option gamma description",
+                        "title": "Option gamma",
                         "type": "boolean"
                     }
-                }, 
+                },
                 "required": [
                     "pbcommand.task_options.gamma"
-                ], 
-                "title": "JSON Schema for pbcommand.task_options.gamma", 
+                ],
+                "title": "JSON Schema for pbcommand.task_options.gamma",
                 "type": "object"
             }
-        ], 
-        "task_type": "pbsmrtpipe.task_types.standard", 
+        ],
+        "task_type": "pbsmrtpipe.task_types.standard",
         "tool_contract_id": "pbcommand.tasks.dev_fastq2fasta"
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_fastq2fasta", 
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_fastq2fasta",
     "version": "0.1.0"
-}
+}
\ No newline at end of file
diff --git a/tests/data/pbcommand.tasks.dev_qhello_world_tool_contract.json b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_qhello_world_tool_contract.json
similarity index 54%
rename from tests/data/pbcommand.tasks.dev_qhello_world_tool_contract.json
rename to tests/data/tool-contracts-v1/pbcommand.tasks.dev_qhello_world_tool_contract.json
index 68edff9..f20996f 100644
--- a/tests/data/pbcommand.tasks.dev_qhello_world_tool_contract.json
+++ b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_qhello_world_tool_contract.json
@@ -1,61 +1,61 @@
 {
     "driver": {
-        "env": {}, 
-        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ", 
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
         "serialization": "json"
-    }, 
+    },
     "tool_contract": {
-        "_comment": "Created by v0.2.14", 
-        "description": "Quick tool dev_qhello_world pbcommand.tasks.dev_qhello_world", 
+        "_comment": "Created by v0.4.9",
+        "description": "Quick tool dev_qhello_world pbcommand.tasks.dev_qhello_world",
         "input_types": [
             {
-                "description": "description for PacBio.FileTypes.Fasta_0", 
-                "file_type_id": "PacBio.FileTypes.Fasta", 
-                "id": "Label PacBio.FileTypes.Fasta_0", 
-                "title": "<FileType id=PacBio.FileTypes.Fasta name=file.fasta >"
+                "description": "description for PacBio.FileTypes.Fasta_0",
+                "file_type_id": "PacBio.FileTypes.Fasta",
+                "id": "Label PacBio.FileTypes.Fasta_0",
+                "title": "<FileType id=PacBio.FileTypes.Fasta name=file >"
             }
-        ], 
-        "is_distributed": true, 
-        "name": "Tool dev_qhello_world", 
-        "nproc": 1, 
+        ],
+        "is_distributed": true,
+        "name": "Tool dev_qhello_world",
+        "nproc": 1,
         "output_types": [
             {
-                "default_name": "file", 
-                "description": "description for <FileType id=PacBio.FileTypes.Fasta name=file.fasta >", 
-                "file_type_id": "PacBio.FileTypes.Fasta", 
-                "id": "Label PacBio.FileTypes.Fasta_0", 
-                "title": "<FileType id=PacBio.FileTypes.Fasta name=file.fasta >"
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.Fasta name=file >",
+                "file_type_id": "PacBio.FileTypes.Fasta",
+                "id": "Label PacBio.FileTypes.Fasta_0",
+                "title": "<FileType id=PacBio.FileTypes.Fasta name=file >"
             }
-        ], 
-        "resource_types": [], 
+        ],
+        "resource_types": [],
         "schema_options": [
             {
-                "$schema": "http://json-schema.org/draft-04/schema#", 
+                "$schema": "http://json-schema.org/draft-04/schema#",
                 "pb_option": {
-                    "default": 1234, 
-                    "description": "Option alpha description", 
-                    "name": "Option alpha", 
-                    "option_id": "pbcommand.task_options.alpha", 
+                    "default": 1234,
+                    "description": "Option alpha description",
+                    "name": "Option alpha",
+                    "option_id": "pbcommand.task_options.alpha",
                     "type": "integer"
-                }, 
+                },
                 "properties": {
                     "pbcommand.task_options.alpha": {
-                        "default": 1234, 
-                        "description": "Option alpha description", 
-                        "title": "Option alpha", 
+                        "default": 1234,
+                        "description": "Option alpha description",
+                        "title": "Option alpha",
                         "type": "integer"
                     }
-                }, 
+                },
                 "required": [
                     "pbcommand.task_options.alpha"
-                ], 
-                "title": "JSON Schema for pbcommand.task_options.alpha", 
+                ],
+                "title": "JSON Schema for pbcommand.task_options.alpha",
                 "type": "object"
             }
-        ], 
-        "task_type": "pbsmrtpipe.task_types.standard", 
+        ],
+        "task_type": "pbsmrtpipe.task_types.standard",
         "tool_contract_id": "pbcommand.tasks.dev_qhello_world"
-    }, 
-    "tool_contract_id": "pbcommand.tasks.dev_qhello_world", 
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_qhello_world",
     "version": "0.2.1"
-}
+}
\ No newline at end of file
diff --git a/tests/data/tool-contracts-v1/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json
new file mode 100644
index 0000000..cff96de
--- /dev/null
+++ b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json
@@ -0,0 +1,44 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
+        "serialization": "json"
+    },
+    "tool_contract": {
+        "_comment": "Created by v0.4.9",
+        "description": "Quick tool dev_txt_custom_outs pbcommand.tasks.dev_txt_custom_outs",
+        "input_types": [
+            {
+                "description": "description for PacBio.FileTypes.txt_0",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_0",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "is_distributed": true,
+        "name": "Custom Txt Task",
+        "nproc": 1,
+        "output_types": [
+            {
+                "default_name": "PacBio.FileTypes.txt_file_0",
+                "description": "File <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "label_PacBio.FileTypes.txt",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            },
+            {
+                "default_name": "PacBio.FileTypes.txt_file_1",
+                "description": "File <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "label_PacBio.FileTypes.txt",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "resource_types": [],
+        "schema_options": [],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_txt_custom_outs"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_txt_custom_outs",
+    "version": "0.1.0"
+}
\ No newline at end of file
diff --git a/tests/data/tool-contracts-v1/pbcommand.tasks.dev_txt_hello_tool_contract.json b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_txt_hello_tool_contract.json
new file mode 100644
index 0000000..78d06bd
--- /dev/null
+++ b/tests/data/tool-contracts-v1/pbcommand.tasks.dev_txt_hello_tool_contract.json
@@ -0,0 +1,44 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
+        "serialization": "json"
+    },
+    "tool_contract": {
+        "_comment": "Created by v0.4.9",
+        "description": "Quick tool dev_txt_hello pbcommand.tasks.dev_txt_hello",
+        "input_types": [
+            {
+                "description": "description for PacBio.FileTypes.txt_0",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_0",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "is_distributed": false,
+        "name": "Tool dev_txt_hello",
+        "nproc": 3,
+        "output_types": [
+            {
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_0",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            },
+            {
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_1",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "resource_types": [],
+        "schema_options": [],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_txt_hello"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_txt_hello",
+    "version": "0.1.0"
+}
\ No newline at end of file
diff --git a/tests/data/tool-contracts/dev_example_dev_txt_app_tool_contract.json b/tests/data/tool-contracts/dev_example_dev_txt_app_tool_contract.json
new file mode 100644
index 0000000..4bf2555
--- /dev/null
+++ b/tests/data/tool-contracts/dev_example_dev_txt_app_tool_contract.json
@@ -0,0 +1,50 @@
+{
+    "version": "0.1.0",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "resource_types": [
+            "$tmpfile",
+            "$tmpfile",
+            "$tmpdir"
+        ],
+        "description": "Dev app for Testing that supports emitting tool contracts",
+        "schema_options": [
+            {
+                "optionTypeId": "integer",
+                "default": 10,
+                "id": "pbcommand.task_options.dev_max_nlines",
+                "name": "Max Lines",
+                "description": "Max Number of lines to Copy"
+            }
+        ],
+        "output_types": [
+            {
+                "title": "Txt outfile",
+                "description": "Generic Output Txt file",
+                "default_name": "output",
+                "id": "txt_out",
+                "file_type_id": "PacBio.FileTypes.txt"
+            }
+        ],
+        "_comment": "Created by pbcommand 0.5.2",
+        "name": "Txt App",
+        "input_types": [
+            {
+                "description": "Generic Text File",
+                "title": "Txt file",
+                "id": "txt_in",
+                "file_type_id": "PacBio.FileTypes.txt"
+            }
+        ],
+        "nproc": 1,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_txt_app"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_txt_app"
+}
diff --git a/tests/data/tool-contracts/dev_example_tool_contract.json b/tests/data/tool-contracts/dev_example_tool_contract.json
new file mode 100644
index 0000000..d9eebe3
--- /dev/null
+++ b/tests/data/tool-contracts/dev_example_tool_contract.json
@@ -0,0 +1,50 @@
+{
+    "version": "0.2.1",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "resource_types": [
+            "$tmpfile",
+            "$tmpfile",
+            "$tmpdir"
+        ],
+        "description": "Dev app for Testing that supports emitting tool contracts",
+        "schema_options": [
+            {
+                "optionTypeId": "integer",
+                "default": 25,
+                "id": "pbcommand.task_options.dev_read_length",
+                "name": "Length filter",
+                "description": "Min Sequence Length filter"
+            }
+        ],
+        "output_types": [
+            {
+                "title": "Filtered Fasta file",
+                "description": "Filtered Fasta file",
+                "default_name": "filter",
+                "id": "fasta_out",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "_comment": "Created by pbcommand 0.5.2",
+        "name": "Example Dev App",
+        "input_types": [
+            {
+                "description": "PacBio Spec'ed fasta file",
+                "title": "Fasta File",
+                "id": "fasta_in",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "nproc": 1,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_app"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_app"
+}
diff --git a/tests/data/tool-contracts/dev_gather_fasta_app_tool_contract.json b/tests/data/tool-contracts/dev_gather_fasta_app_tool_contract.json
new file mode 100644
index 0000000..71ecfb9
--- /dev/null
+++ b/tests/data/tool-contracts/dev_gather_fasta_app_tool_contract.json
@@ -0,0 +1,38 @@
+{
+    "version": "0.1.0",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_gather_fasta_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.gathered",
+        "resource_types": [],
+        "description": "Gather a fasta resources in a Chunk.json file",
+        "schema_options": [],
+        "output_types": [
+            {
+                "title": "Chunk JSON",
+                "description": "Output Fasta",
+                "default_name": "gathered",
+                "id": "output",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "_comment": "Created by pbcommand 0.5.2",
+        "name": "Fasta Chunk Gather",
+        "input_types": [
+            {
+                "description": "Chunked Fasta JSON Out",
+                "title": "Chunk JSON",
+                "id": "chunk_json",
+                "file_type_id": "PacBio.FileTypes.CHUNK"
+            }
+        ],
+        "nproc": 1,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_gather_fasta"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_gather_fasta"
+}
diff --git a/tests/data/tool-contracts/dev_mixed_app_tool_contract.json b/tests/data/tool-contracts/dev_mixed_app_tool_contract.json
new file mode 100644
index 0000000..143362c
--- /dev/null
+++ b/tests/data/tool-contracts/dev_mixed_app_tool_contract.json
@@ -0,0 +1,102 @@
+{
+    "version": "0.2.0",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_mixed_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "resource_types": [],
+        "description": "Dev app for Testing that supports emitting tool contracts",
+        "schema_options": [
+            {
+                "optionTypeId": "integer",
+                "default": 25,
+                "id": "pbcommand.task_options.alpha",
+                "name": "Alpha",
+                "description": "Alpha description"
+            },
+            {
+                "optionTypeId": "float",
+                "default": 1.234,
+                "id": "pbcommand.task_options.beta",
+                "name": "Beta",
+                "description": "Beta description"
+            },
+            {
+                "optionTypeId": "boolean",
+                "default": true,
+                "id": "pbcommand.task_options.gamma",
+                "name": "Gamma",
+                "description": "Gamma description"
+            },
+            {
+                "name": "Ploidy",
+                "default": "haploid",
+                "choices": [
+                    "haploid",
+                    "diploid"
+                ],
+                "optionTypeId": "choice_string",
+                "id": "pbcommand.task_options.ploidy",
+                "description": "Genome ploidy"
+            },
+            {
+                "name": "Delta",
+                "default": 1,
+                "choices": [
+                    1,
+                    2,
+                    3
+                ],
+                "optionTypeId": "choice_integer",
+                "id": "pbcommand.task_options.delta",
+                "description": "An integer choice"
+            },
+            {
+                "name": "Epsilon",
+                "default": 0.1,
+                "choices": [
+                    0.01,
+                    0.1,
+                    1.0
+                ],
+                "optionTypeId": "choice_float",
+                "id": "pbcommand.task_options.epsilon",
+                "description": "A float choice"
+            },
+            {
+                "optionTypeId": "string",
+                "default": "asdf",
+                "id": "pbcommand.task_options.comment",
+                "name": "Comments",
+                "description": "A string parameter"
+            }
+        ],
+        "output_types": [
+            {
+                "title": "Output Report",
+                "description": "Output PacBio Report JSON",
+                "default_name": "example.report",
+                "id": "rpt",
+                "file_type_id": "PacBio.FileTypes.JsonReport"
+            }
+        ],
+        "_comment": "Created by pbcommand 0.5.2",
+        "name": "DevApp",
+        "input_types": [
+            {
+                "description": "Input csv description",
+                "title": "Input CSV",
+                "id": "csv",
+                "file_type_id": "PacBio.FileTypes.csv"
+            }
+        ],
+        "nproc": 2,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_mixed_app"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_mixed_app"
+}
diff --git a/tests/data/tool-contracts/dev_scatter_fasta_app_tool_contract.json b/tests/data/tool-contracts/dev_scatter_fasta_app_tool_contract.json
new file mode 100644
index 0000000..123ae91
--- /dev/null
+++ b/tests/data/tool-contracts/dev_scatter_fasta_app_tool_contract.json
@@ -0,0 +1,50 @@
+{
+    "version": "0.1.0",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_scatter_fasta_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.scattered",
+        "resource_types": [],
+        "description": "Scatter a single fasta file to create chunk.json file",
+        "schema_options": [
+            {
+                "optionTypeId": "integer",
+                "default": 10,
+                "id": "pbcommand.task_options.dev_scatter_fa_nchunks",
+                "name": "Number of chunks",
+                "description": "Suggested number of chunks. May be overridden by $max_nchunks"
+            }
+        ],
+        "output_types": [
+            {
+                "title": "Chunk JSON",
+                "description": "Scattered/Chunked Fasta Chunk.json",
+                "default_name": "fasta.chunks",
+                "id": "cjson",
+                "file_type_id": "PacBio.FileTypes.CHUNK"
+            }
+        ],
+        "_comment": "Created by pbcommand 0.5.2",
+        "nchunks": "$max_nchunks",
+        "name": "Fasta Scatter",
+        "input_types": [
+            {
+                "description": "Fasta file to scatter",
+                "title": "Fasta In",
+                "id": "fasta_in",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "chunk_keys": [
+            "$chunk.fasta_id"
+        ],
+        "nproc": 1,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_scatter_fasta"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_scatter_fasta"
+}
diff --git a/tests/data/tool-contracts/makefile b/tests/data/tool-contracts/makefile
new file mode 100644
index 0000000..e408fd8
--- /dev/null
+++ b/tests/data/tool-contracts/makefile
@@ -0,0 +1,4 @@
+emit-tool-contracts:
+	python -m pbcommand.cli.examples.dev_scatter_fasta_app --emit-tool-contract > dev_scatter_fasta_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_gather_fasta_app --emit-tool-contract > dev_gather_fasta_app_tool_contract.json
+	python -m pbcommand.cli.examples.dev_quick_hello_world emit-tool-contracts
diff --git a/tests/data/tool-contracts/pbcommand.tasks.dev_app_tool_contract.json b/tests/data/tool-contracts/pbcommand.tasks.dev_app_tool_contract.json
new file mode 100644
index 0000000..d9eebe3
--- /dev/null
+++ b/tests/data/tool-contracts/pbcommand.tasks.dev_app_tool_contract.json
@@ -0,0 +1,50 @@
+{
+    "version": "0.2.1",
+    "driver": {
+        "serialization": "json",
+        "exe": "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract ",
+        "env": {}
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "resource_types": [
+            "$tmpfile",
+            "$tmpfile",
+            "$tmpdir"
+        ],
+        "description": "Dev app for Testing that supports emitting tool contracts",
+        "schema_options": [
+            {
+                "optionTypeId": "integer",
+                "default": 25,
+                "id": "pbcommand.task_options.dev_read_length",
+                "name": "Length filter",
+                "description": "Min Sequence Length filter"
+            }
+        ],
+        "output_types": [
+            {
+                "title": "Filtered Fasta file",
+                "description": "Filtered Fasta file",
+                "default_name": "filter",
+                "id": "fasta_out",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "_comment": "Created by pbcommand 0.5.2",
+        "name": "Example Dev App",
+        "input_types": [
+            {
+                "description": "PacBio Spec'ed fasta file",
+                "title": "Fasta File",
+                "id": "fasta_in",
+                "file_type_id": "PacBio.FileTypes.Fasta"
+            }
+        ],
+        "nproc": 1,
+        "is_distributed": false,
+        "tool_contract_id": "pbcommand.tasks.dev_app"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_app"
+}
diff --git a/tests/data/tool-contracts/pbcommand.tasks.dev_fastq2fasta_tool_contract.json b/tests/data/tool-contracts/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
new file mode 100644
index 0000000..9339aa7
--- /dev/null
+++ b/tests/data/tool-contracts/pbcommand.tasks.dev_fastq2fasta_tool_contract.json
@@ -0,0 +1,53 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
+        "serialization": "json"
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "_comment": "Created by pbcommand 0.5.2",
+        "description": "Dev Task Fastq to Fasta Example",
+        "input_types": [
+            {
+                "description": "description for PacBio.FileTypes.Fastq_0",
+                "file_type_id": "PacBio.FileTypes.Fastq",
+                "id": "Label PacBio.FileTypes.Fastq_0",
+                "title": "<FileType id=PacBio.FileTypes.Fastq name=file >"
+            }
+        ],
+        "is_distributed": true,
+        "name": "Fastq to Fasta",
+        "nproc": 1,
+        "output_types": [
+            {
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.Fasta name=file >",
+                "file_type_id": "PacBio.FileTypes.Fasta",
+                "id": "Label PacBio.FileTypes.Fasta_0",
+                "title": "<FileType id=PacBio.FileTypes.Fasta name=file >"
+            }
+        ],
+        "resource_types": [],
+        "schema_options": [
+            {
+                "default": 1234.0,
+                "description": "Beta Description",
+                "id": "pbcommand.task_options.beta",
+                "name": "Beta Name",
+                "optionTypeId": "float"
+            },
+            {
+                "default": true,
+                "description": "Option gamma description",
+                "id": "pbcommand.task_options.gamma",
+                "name": "Option gamma",
+                "optionTypeId": "boolean"
+            }
+        ],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_fastq2fasta"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_fastq2fasta",
+    "version": "0.1.0"
+}
\ No newline at end of file
diff --git a/tests/data/tool-contracts/pbcommand.tasks.dev_qhello_world_tool_contract.json b/tests/data/tool-contracts/pbcommand.tasks.dev_qhello_world_tool_contract.json
new file mode 100644
index 0000000..6329fea
--- /dev/null
+++ b/tests/data/tool-contracts/pbcommand.tasks.dev_qhello_world_tool_contract.json
@@ -0,0 +1,46 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
+        "serialization": "json"
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "_comment": "Created by pbcommand 0.5.2",
+        "description": "Quick tool dev_qhello_world pbcommand.tasks.dev_qhello_world",
+        "input_types": [
+            {
+                "description": "description for PacBio.FileTypes.Fasta_0",
+                "file_type_id": "PacBio.FileTypes.Fasta",
+                "id": "Label PacBio.FileTypes.Fasta_0",
+                "title": "<FileType id=PacBio.FileTypes.Fasta name=file >"
+            }
+        ],
+        "is_distributed": true,
+        "name": "Tool dev_qhello_world",
+        "nproc": 1,
+        "output_types": [
+            {
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.Fasta name=file >",
+                "file_type_id": "PacBio.FileTypes.Fasta",
+                "id": "Label PacBio.FileTypes.Fasta_0",
+                "title": "<FileType id=PacBio.FileTypes.Fasta name=file >"
+            }
+        ],
+        "resource_types": [],
+        "schema_options": [
+            {
+                "default": 1234,
+                "description": "Option alpha description",
+                "id": "pbcommand.task_options.alpha",
+                "name": "Option alpha",
+                "optionTypeId": "integer"
+            }
+        ],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_qhello_world"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_qhello_world",
+    "version": "0.2.1"
+}
\ No newline at end of file
diff --git a/tests/data/tool-contracts/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json b/tests/data/tool-contracts/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json
new file mode 100644
index 0000000..a8d0c85
--- /dev/null
+++ b/tests/data/tool-contracts/pbcommand.tasks.dev_txt_custom_outs_tool_contract.json
@@ -0,0 +1,45 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
+        "serialization": "json"
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "_comment": "Created by pbcommand 0.5.2",
+        "description": "Quick tool dev_txt_custom_outs pbcommand.tasks.dev_txt_custom_outs",
+        "input_types": [
+            {
+                "description": "description for PacBio.FileTypes.txt_0",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_0",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "is_distributed": true,
+        "name": "Custom Txt Task",
+        "nproc": 1,
+        "output_types": [
+            {
+                "default_name": "PacBio.FileTypes.txt_file_0",
+                "description": "File <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "label_PacBio.FileTypes.txt",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            },
+            {
+                "default_name": "PacBio.FileTypes.txt_file_1",
+                "description": "File <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "label_PacBio.FileTypes.txt",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "resource_types": [],
+        "schema_options": [],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_txt_custom_outs"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_txt_custom_outs",
+    "version": "0.1.0"
+}
\ No newline at end of file
diff --git a/tests/data/tool-contracts/pbcommand.tasks.dev_txt_hello_tool_contract.json b/tests/data/tool-contracts/pbcommand.tasks.dev_txt_hello_tool_contract.json
new file mode 100644
index 0000000..ffcf855
--- /dev/null
+++ b/tests/data/tool-contracts/pbcommand.tasks.dev_txt_hello_tool_contract.json
@@ -0,0 +1,45 @@
+{
+    "driver": {
+        "env": {},
+        "exe": "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc  ",
+        "serialization": "json"
+    },
+    "schema_version": "2.0.0",
+    "tool_contract": {
+        "_comment": "Created by pbcommand 0.5.2",
+        "description": "Quick tool dev_txt_hello pbcommand.tasks.dev_txt_hello",
+        "input_types": [
+            {
+                "description": "description for PacBio.FileTypes.txt_0",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_0",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "is_distributed": false,
+        "name": "Tool dev_txt_hello",
+        "nproc": 3,
+        "output_types": [
+            {
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_0",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            },
+            {
+                "default_name": "file",
+                "description": "description for <FileType id=PacBio.FileTypes.txt name=file >",
+                "file_type_id": "PacBio.FileTypes.txt",
+                "id": "Label PacBio.FileTypes.txt_1",
+                "title": "<FileType id=PacBio.FileTypes.txt name=file >"
+            }
+        ],
+        "resource_types": [],
+        "schema_options": [],
+        "task_type": "pbsmrtpipe.task_types.standard",
+        "tool_contract_id": "pbcommand.tasks.dev_txt_hello"
+    },
+    "tool_contract_id": "pbcommand.tasks.dev_txt_hello",
+    "version": "0.1.0"
+}
\ No newline at end of file
diff --git a/tests/test_e2e_example_apps.py b/tests/test_e2e_example_apps.py
index ac86eeb..c22d7cf 100644
--- a/tests/test_e2e_example_apps.py
+++ b/tests/test_e2e_example_apps.py
@@ -1,19 +1,20 @@
 import logging
 
 from base_utils import get_data_file
-import pbcommand.testkit
+from pbcommand.testkit import PbTestApp
+from pbcommand.resolver import ToolContractError
 
 log = logging.getLogger(__name__)
 
 
-class TestRunDevApp(pbcommand.testkit.PbTestApp):
+class TestRunDevApp(PbTestApp):
     DRIVER_BASE = "python -m pbcommand.cli.examples.dev_app "
     REQUIRES_PBCORE = True
     INPUT_FILES = [get_data_file("example.fasta")]
     TASK_OPTIONS = {"pbcommand.task_options.dev_read_length": 27}
 
 
-class TestTxtDevApp(pbcommand.testkit.PbTestApp):
+class TestTxtDevApp(PbTestApp):
     DRIVER_BASE = "python -m pbcommand.cli.examples.dev_txt_app "
     # XXX using default args, so the emit/resolve drivers are automatic
     REQUIRES_PBCORE = False
@@ -22,7 +23,7 @@ class TestTxtDevApp(pbcommand.testkit.PbTestApp):
     RESOLVED_TASK_OPTIONS = {"pbcommand.task_options.dev_max_nlines": 27}
 
 
-class TestQuickDevHelloWorld(pbcommand.testkit.PbTestApp):
+class TestQuickDevHelloWorld(PbTestApp):
     """Runs dev_qhello_world """
     DRIVER_EMIT = "python -m pbcommand.cli.examples.dev_quick_hello_world  emit-tool-contract pbcommand.tasks.dev_qhello_world "
     DRIVER_RESOLVE = "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc "
@@ -33,7 +34,7 @@ class TestQuickDevHelloWorld(pbcommand.testkit.PbTestApp):
     RESOLVED_IS_DISTRIBUTED = True
 
 
-class TestQuickTxt(pbcommand.testkit.PbTestApp):
+class TestQuickTxt(PbTestApp):
     """Runs dev_qhello_world """
     DRIVER_EMIT = "python -m pbcommand.cli.examples.dev_quick_hello_world  emit-tool-contract pbcommand.tasks.dev_txt_hello "
     DRIVER_RESOLVE = "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc "
@@ -44,10 +45,62 @@ class TestQuickTxt(pbcommand.testkit.PbTestApp):
     RESOLVED_IS_DISTRIBUTED = False # XXX is_distributed=False in task TC!
 
 
-class TestQuickCustomTxtCustomOuts(pbcommand.testkit.PbTestApp):
+class TestQuickCustomTxtCustomOuts(PbTestApp):
     """Runs dev_qhello_world """
     DRIVER_EMIT = "python -m pbcommand.cli.examples.dev_quick_hello_world  emit-tool-contract pbcommand.tasks.dev_txt_custom_outs "
     DRIVER_RESOLVE = "python -m pbcommand.cli.examples.dev_quick_hello_world  run-rtc "
 
     REQUIRES_PBCORE = False
     INPUT_FILES = [get_data_file("example.txt")]
+
+
+class TestOptionTypes(PbTestApp):
+    DRIVER_BASE = "python -m pbcommand.cli.examples.dev_mixed_app"
+    REQUIRES_PBCORE = False
+    INPUT_FILES = [get_data_file("example.txt")]
+    TASK_OPTIONS = {
+        "pbcommand.task_options.alpha": 50,
+        "pbcommand.task_options.beta": 9.876,
+        "pbcommand.task_options.gamma": False,
+        "pbcommand.task_options.ploidy": "diploid"
+    }
+    RESOLVED_TASK_OPTIONS = {
+        "pbcommand.task_options.alpha": 50,
+        "pbcommand.task_options.beta": 9.876,
+        "pbcommand.task_options.gamma": False,
+        "pbcommand.task_options.ploidy": "diploid",
+        "pbcommand.task_options.delta": 1,
+        "pbcommand.task_options.epsilon": 0.1
+    }
+
+
+class TestBadChoiceValue(TestOptionTypes):
+    TASK_OPTIONS = {
+        "pbcommand.task_options.alpha": 50,
+        "pbcommand.task_options.beta": 9.876,
+        "pbcommand.task_options.gamma": False,
+        "pbcommand.task_options.ploidy": "other"
+    }
+
+    def test_run_e2e(self):
+        self.assertRaises(ToolContractError, super(TestBadChoiceValue, self).test_run_e2e)
+
+
+class TestQuickOptionTypes(PbTestApp):
+    DRIVER_EMIT = "python -m pbcommand.cli.examples.dev_quick_hello_world  emit-tool-contract pbcommand.tasks.dev_test_options"
+    DRIVER_RESOLVE = "python -m pbcommand.cli.examples.dev_quick_hello_world run-rtc "
+    INPUT_FILES = [get_data_file("example.txt")]
+    TASK_OPTIONS = {
+        "pbcommand.task_options.alpha": 50,
+        "pbcommand.task_options.beta": 9.876,
+        "pbcommand.task_options.gamma": False,
+        "pbcommand.task_options.ploidy": "diploid"
+    }
+    RESOLVED_TASK_OPTIONS = {
+        "pbcommand.task_options.alpha": 50,
+        "pbcommand.task_options.beta": 9.876,
+        "pbcommand.task_options.gamma": False,
+        "pbcommand.task_options.ploidy": "diploid",
+        "pbcommand.task_options.delta": 1,
+        "pbcommand.task_options.epsilon": 0.01
+    }
diff --git a/tests/test_load_resolved_tool_contract.py b/tests/test_load_resolved_tool_contract.py
index 84af431..7960c45 100644
--- a/tests/test_load_resolved_tool_contract.py
+++ b/tests/test_load_resolved_tool_contract.py
@@ -4,7 +4,7 @@ import unittest
 import logging
 import os.path
 
-from base_utils import get_data_file
+from base_utils import get_data_file, get_tool_contract, get_resolved_tool_contract
 from pbcommand.resolver import resolve_tool_contract
 from pbcommand.pb_io.tool_contract_io import (load_resolved_tool_contract_from,
                                               load_tool_contract_from)
@@ -28,7 +28,7 @@ class _TestUtil(unittest.TestCase):
 class TestLoadResolvedContract(unittest.TestCase):
 
     def test_01(self):
-        path = get_data_file("dev_example_resolved_tool_contract.json")
+        path = get_resolved_tool_contract("dev_example_resolved_tool_contract.json")
         rtc = load_resolved_tool_contract_from(path)
         log.info(rtc)
         self.assertIsNotNone(rtc)
@@ -38,7 +38,7 @@ class TestResolveContract(unittest.TestCase):
 
     def test_01(self):
         name = "dev_example_dev_txt_app_tool_contract.json"
-        p = get_data_file(name)
+        p = get_tool_contract(name)
         tc = load_tool_contract_from(p)
         input_files = ["/tmp/file.txt"]
         root_output_dir = "/tmp"
diff --git a/tests/test_models_common_pacbio_options.py b/tests/test_models_common_pacbio_options.py
new file mode 100644
index 0000000..31e99b8
--- /dev/null
+++ b/tests/test_models_common_pacbio_options.py
@@ -0,0 +1,167 @@
+import unittest
+import logging
+
+from pbcommand.models.common import (PacBioFloatChoiceOption, PacBioIntOption,
+                                     PacBioStringOption,
+                                     PacBioStringChoiceOption,
+                                     PacBioIntChoiceOption,
+                                     PacBioBooleanOption, PacBioFloatOption)
+
+log = logging.getLogger(__name__)
+
+
+def _to_i(s):
+    return "test.task_options.{}".format(s)
+
+
+def get_or(i, value):
+    return value if i is None else i
+
+
+class TestPacBioBasicOptionTest(unittest.TestCase):
+    OPT_KLASS = PacBioIntOption
+    OPT_ID = "alpha"
+    OPT_NAME = "Alpha"
+    OPT_DESC = "Alpha description"
+    OPT_DEFAULT = 2
+
+    def _to_opt(self, i=None, n=None, v=None, d=None):
+        ix = get_or(i, _to_i(self.OPT_ID))
+        name = get_or(n, self.OPT_NAME)
+        value = get_or(v, self.OPT_DEFAULT)
+        description = get_or(d, self.OPT_DESC)
+        return self.OPT_KLASS(ix, name, value, description)
+
+    def test_sanity_option(self):
+        o = self._to_opt()
+        log.debug("Created option {o}".format(o=o))
+
+        self.assertEqual(o.option_id, "test.task_options.{}".format(self.OPT_ID))
+        self.assertEqual(o.name, self.OPT_NAME)
+        self.assertEqual(o.default, self.OPT_DEFAULT)
+        self.assertEqual(o.description, self.OPT_DESC)
+
+
+class TestPacBioIntOptionTest(TestPacBioBasicOptionTest):
+
+    def test_bad_value_string(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v="bad-string")
+
+    def test_bad_value_float(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=3.124)
+
+    def test_bad_value_boolean(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=True)
+
+
+class TestPacBioBooleanOptionTest(TestPacBioBasicOptionTest):
+    OPT_KLASS = PacBioBooleanOption
+    OPT_DEFAULT = True
+
+    def test_bad_value_int(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=1)
+
+    def test_bad_value_float(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=1.10)
+
+    def test_bad_value_string(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v="bad-string")
+
+
+class TestPacBioFloatOptionTest(TestPacBioBasicOptionTest):
+    OPT_KLASS = PacBioFloatOption
+    OPT_DEFAULT = 3.1415
+
+    def test_coerced_value_int(self):
+        o = self._to_opt(v=1)
+        self.assertEqual(o.default, 1.0)
+
+    def test_bad_value_boolean(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=True)
+
+    def test_bad_value_string(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v="bad-string")
+
+    def test_bad_value_float_tuple(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=(1.0, 2.0))
+
+
+class TestPacBioStringOptionTest(TestPacBioBasicOptionTest):
+    OPT_KLASS = PacBioStringOption
+    OPT_DEFAULT = "gamma"
+
+    def test_bad_value_int(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=1)
+
+    def test_bad_value_float(self):
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=1.10)
+
+    def test_bad_not_supported_unicode(self):
+        """Test that unicode values are not supported"""
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(v=unicode('abcdef'))
+
+
+class TestPacBioBasicChoiceTest(TestPacBioBasicOptionTest):
+    OPT_KLASS = PacBioStringChoiceOption
+    OPT_CHOICES = ("alpha", "beta", "gamma")
+    OPT_DEFAULT = "beta"
+    OPT_BAD_OPTION = "delta"
+
+    def _to_opt(self, i=None, n=None, v=None, d=None, c=None):
+        ix = get_or(i, _to_i(self.OPT_ID))
+        name = get_or(n, self.OPT_NAME)
+        value = get_or(v, self.OPT_DEFAULT)
+        description = get_or(d, self.OPT_DESC)
+        choices = get_or(c, self.OPT_CHOICES)
+        return self.OPT_KLASS(ix, name, value, description, choices)
+
+    def test_sanity_choice_option(self):
+        o = self._to_opt()
+        self.assertEqual(o.choices, self.OPT_CHOICES)
+
+    def test_bad_invalid_choice(self):
+        with self.assertRaises(ValueError):
+            _ = self._to_opt(v=self.OPT_BAD_OPTION)
+
+
+class TestPacBioChoiceStringOptionTest(TestPacBioBasicChoiceTest):
+    OPT_KLASS = PacBioStringChoiceOption
+    OPT_DEFAULT = "gamma"
+    OPT_BAD_OPTION = "Bad-value"
+
+
+class TestPacBioIntChoiceOptionTest(TestPacBioBasicChoiceTest):
+    OPT_KLASS = PacBioIntChoiceOption
+    OPT_CHOICES = (1, 2, 7)
+    OPT_DEFAULT = 2
+    OPT_BAD_OPTION = 3
+
+
+class TestPacBioFloatChoiceOptionTest(TestPacBioBasicChoiceTest):
+    OPT_KLASS = PacBioFloatChoiceOption
+    OPT_CHOICES = (1.0, 2.0, 7.0)
+    OPT_DEFAULT = 2.0
+    OPT_BAD_OPTION = -1.0
+
+    def test_coerce_float_choices(self):
+        choices = (10, 12123, 12)
+        o = self._to_opt(c=choices, v=12)
+
+    def test_bad_choices(self):
+        choices = (1, 2.0, "bad-value")
+        with self.assertRaises(TypeError):
+            _ = self._to_opt(c=choices)
+
+
diff --git a/tests/test_models_report.py b/tests/test_models_report.py
index b4b6dfb..c736442 100644
--- a/tests/test_models_report.py
+++ b/tests/test_models_report.py
@@ -1,25 +1,29 @@
 import json
 import logging
 from pprint import pformat
+import os.path
 import re
 import unittest
 
-from pbcommand.pb_io import load_report_from_json
+from pbcommand.pb_io import load_report_from_json, load_report_spec_from_json
 from pbcommand.models.report import (Report, Attribute, PlotGroup, Plot, Table,
-                                     Column, PbReportError)
+                                     Column, PbReportError, format_metric)
+from pbcommand.schemas import validate_report
 
 _SERIALIZED_JSON_DIR = 'example-reports'
 
-from base_utils import get_data_file_from_subdir
+from base_utils import get_data_file_from_subdir, DATA_DIR
 
 log = logging.getLogger(__name__)
 
+
 def _to_report(name):
     file_name = get_data_file_from_subdir(_SERIALIZED_JSON_DIR, name)
     log.info("loading json report from {f}".format(f=file_name))
     r = load_report_from_json(file_name)
     return r
 
+
 class TestReportModel(unittest.TestCase):
 
     def test_from_simple_dict(self):
@@ -94,24 +98,23 @@ class TestReportModel(unittest.TestCase):
         self.assertEqual('redfang.a', d['attributes'][0]['id'])
         self.assertEqual('redfang.a2', d['attributes'][1]['id'])
         self.assertEqual('redfang.pgid', d['plotGroups'][0]['id'])
-        self.assertEqual('redfang.pgid.pid', d['plotGroups'][0]['plots'][0]['id'])
-        self.assertEqual('redfang.pgid.pid2', d['plotGroups'][0]['plots'][1]['id'])
+        self.assertEqual('redfang.pgid.pid', d[
+                         'plotGroups'][0]['plots'][0]['id'])
+        self.assertEqual('redfang.pgid.pid2', d[
+                         'plotGroups'][0]['plots'][1]['id'])
 
         self.assertEqual('redfang.tabid', d['tables'][0]['id'])
-        self.assertEqual('redfang.tabid.c1', d['tables'][0]['columns'][0]['id'])
+        self.assertEqual('redfang.tabid.c1', d['tables'][
+                         0]['columns'][0]['id'])
 
     def test_version_and_changelist(self):
         r = Report('example')
         d = r.to_dict()
         log.info("\n" + pformat(d))
-        self.assertTrue('_version' in d)
-        self.assertTrue('_changelist' in d)
 
-        # Not used anymore. The all version information is encoded in _version.
-        # that should be sufficient.
-        # self.assertTrue(isinstance(d['_changelist'], int))
-        rx = re.compile(r'[0-9]*\.[0-9]*')
-        self.assertIsNotNone(rx.search(d['_version']))
+        fields = ('version', 'uuid', 'plotGroups', 'tables', 'dataset_uuids')
+        for field in fields:
+            self.assertTrue(field in d)
 
     def test_to_dict_multi(self):
         """
@@ -152,18 +155,24 @@ class TestReportModel(unittest.TestCase):
         self.assertEqual('redfang.a2', d['attributes'][1]['id'])
 
         self.assertEqual('redfang.pgid', d['plotGroups'][0]['id'])
-        self.assertEqual('redfang.pgid.pid', d['plotGroups'][0]['plots'][0]['id'])
-        self.assertEqual('redfang.pgid.pid2', d['plotGroups'][0]['plots'][1]['id'])
+        self.assertEqual('redfang.pgid.pid', d[
+                         'plotGroups'][0]['plots'][0]['id'])
+        self.assertEqual('redfang.pgid.pid2', d[
+                         'plotGroups'][0]['plots'][1]['id'])
 
         self.assertEqual('redfang.pgid2', d['plotGroups'][1]['id'])
-        self.assertEqual('redfang.pgid2.pid2', d['plotGroups'][1]['plots'][0]['id'])
-        self.assertEqual('redfang.pgid2.pid22', d['plotGroups'][1]['plots'][1]['id'])
+        self.assertEqual('redfang.pgid2.pid2', d[
+                         'plotGroups'][1]['plots'][0]['id'])
+        self.assertEqual('redfang.pgid2.pid22', d[
+                         'plotGroups'][1]['plots'][1]['id'])
 
         self.assertEqual('redfang.tabid', d['tables'][0]['id'])
-        self.assertEqual('redfang.tabid.c1', d['tables'][0]['columns'][0]['id'])
+        self.assertEqual('redfang.tabid.c1', d['tables'][
+                         0]['columns'][0]['id'])
 
         self.assertEqual('redfang.tabid2', d['tables'][1]['id'])
-        self.assertEqual('redfang.tabid2.c2', d['tables'][1]['columns'][0]['id'])
+        self.assertEqual('redfang.tabid2.c2', d[
+                         'tables'][1]['columns'][0]['id'])
 
         log.info(repr(r))
         self.assertIsNotNone(repr(r))
@@ -190,6 +199,84 @@ class TestReportModel(unittest.TestCase):
         bad_a = report.get_attribute_by_id('id_that_does_not_exist')
         self.assertIsNone(bad_a)
 
+    def test_get_table_by_id(self):
+        r = Report('redfang')
+        t1 = Table('tabid1')
+        t1.add_column(Column('c1'))
+        r.add_table(t1)
+
+        t = r.get_table_by_id('tabid1')
+        self.assertEqual(t, t1)
+
+    def test_get_table_by_id_with_bad_id(self):
+        r = Report('redfang')
+        t1 = Table('tabid1')
+        t1.add_column(Column('c1'))
+        r.add_table(t1)
+
+        bad_t = r.get_table_by_id('id_that_does_not_exist')
+        self.assertIsNone(bad_t)
+
+    def test_get_column_by_id(self):
+        r = Report('redfang')
+        t1 = Table('tabid1')
+        c1 = Column('c1')
+        t1.add_column(c1)
+        r.add_table(t1)
+
+        c = r.get_table_by_id('tabid1').get_column_by_id('c1')
+        self.assertEqual(c, c1)
+
+    def test_get_column_by_id_with_bad_id(self):
+        r = Report('redfang')
+        t1 = Table('tabid1')
+        c1 = Column('c1')
+        t1.add_column(c1)
+        r.add_table(t1)
+
+        bad_c = r.get_table_by_id('tabid1').get_column_by_id(
+            'id_that_does_not_exist')
+        self.assertIsNone(bad_c)
+
+    def test_get_plotgroup_by_id(self):
+        r = Report('redfang')
+        pg1 = PlotGroup('pgid1')
+        pg1.add_plot(Plot('pid1', 'anImg'))
+        r.add_plotgroup(pg1)
+
+        pg = r.get_plotgroup_by_id('pgid1')
+        self.assertEqual(pg, pg1)
+
+    def test_get_plotgroup_by_id_with_bad_id(self):
+        r = Report('redfang')
+        pg1 = PlotGroup('pgid1')
+        pg1.add_plot(Plot('pid1', 'anImg'))
+        r.add_plotgroup(pg1)
+
+        bad_pg = r.get_plotgroup_by_id('id_that_does_not_exist')
+        self.assertIsNone(bad_pg)
+
+    def test_get_plot_by_id(self):
+        r = Report('redfang')
+        pg1 = PlotGroup('pgid1')
+        p1 = Plot('pid1', 'anImg')
+        pg1.add_plot(p1)
+        r.add_plotgroup(pg1)
+
+        p = r.get_plotgroup_by_id('pgid1').get_plot_by_id('pid1')
+        self.assertEqual(p, p1)
+
+    def test_get_plot_by_id_with_bad_id(self):
+        r = Report('redfang')
+        pg1 = PlotGroup('pgid1')
+        p1 = Plot('pid1', 'anImg')
+        pg1.add_plot(p1)
+        r.add_plotgroup(pg1)
+
+        bad_p = r.get_plotgroup_by_id(
+            'pgid1').get_plot_by_id('id_that_does_not_exist')
+        self.assertIsNone(bad_p)
+
     def test_merge(self):
         EXPECTED_VALUES = {
             "n_reads": 300,
@@ -201,14 +288,16 @@ class TestReportModel(unittest.TestCase):
         }
         chunks = [
             Report("pbcommand_test",
-                attributes=[
-                    Attribute(id_="n_reads", value=50, name="Number of reads"),
-                    Attribute(id_="n_zmws", value=10, name="Number of ZMWs")],
-                dataset_uuids=["12345"]),
+                   attributes=[
+                       Attribute(id_="n_reads", value=50,
+                                 name="Number of reads"),
+                       Attribute(id_="n_zmws", value=10, name="Number of ZMWs")],
+                   dataset_uuids=["12345"]),
             Report("pbcommand_test",
-                attributes=[
-                    Attribute(id_="n_reads", value=250, name="Number of reads"),
-                    Attribute(id_="n_zmws", value=50, name="Number of ZMWs")]),
+                   attributes=[
+                       Attribute(id_="n_reads", value=250,
+                                 name="Number of reads"),
+                       Attribute(id_="n_zmws", value=50, name="Number of ZMWs")]),
         ]
         r = Report.merge(chunks)
         self.assertEqual([a.id for a in r.attributes], ["n_reads", "n_zmws"])
@@ -242,3 +331,83 @@ class TestReportModel(unittest.TestCase):
                      'BarcodeFasta3'])
             else:
                 self.assertEqual(col.values, [1, 2, 4, 3])
+
+
+class TestMalformedReport(unittest.TestCase):
+
+    def test_bad_01(self):
+        r = Report("stuff", uuid=1234)
+        d = r.to_dict()
+
+        def fx():
+            # when the Report validation is enabled, use to_json
+            # r.to_json()
+            return validate_report(d)
+
+        self.assertRaises(IOError, fx)
+
+
+class TestReportSchemaVersion100(unittest.TestCase):
+
+    name = "example_version_1_0_0.json"
+
+    def test_sanity(self):
+        r = _to_report(self.name)
+        self.assertIsInstance(r, Report)
+
+
+class TestReportSchemaVersion100WithPlots(TestReportSchemaVersion100):
+    name = "example_with_plot.json"
+
+
+class TestReportSpec(unittest.TestCase):
+
+    def setUp(self):
+        self.spec = load_report_spec_from_json(
+            os.path.join(DATA_DIR, "report-specs", "report_spec.json"))
+
+    def test_report_validation(self):
+        rpt = _to_report("test_report.json")
+        r = self.spec.validate_report(rpt)
+        self.assertTrue(isinstance(r, Report))
+        rpt.attributes.append(Attribute("attribute5", value=12345))
+        error_len = lambda e: len(e.message.split("\n"))
+        try:
+            self.spec.validate_report(rpt)
+        except ValueError as e:
+            self.assertEqual(error_len(e), 2)
+        else:
+            self.fail("Expected exception")
+        self.assertFalse(self.spec.is_valid_report(rpt))
+        rpt.attributes[0] = Attribute("attribute1", value=1.2345)
+        try:
+            self.spec.validate_report(rpt)
+        except ValueError as e:
+            print e
+            self.assertEqual(error_len(e), 3)
+        else:
+            self.fail("Expected exception")
+        self.assertFalse(self.spec.is_valid_report(rpt))
+
+    def test_format_metric(self):
+        s = format_metric("{:,d}", 123456789)
+        self.assertEqual(s, "123,456,789")
+        s = format_metric("{:.4g}", 1.2345678)
+        self.assertEqual(s, "1.235")
+        s = format_metric("{M:.2f} Mb", 123456789)
+        self.assertEqual(s, "123.46 Mb")
+        s = format_metric("{p:.5g}%", 0.987654321)
+        self.assertEqual(s, "98.765%")
+        s = format_metric("{p:g}", 0.000001)
+        self.assertEqual(s, "0.0001%")
+        s = format_metric("{:,.3f}", 1000000.2345678)
+        self.assertEqual(s, "1,000,000.235")
+
+    def test_apply_view(self):
+        rpt = _to_report("test_report2.json")
+        rpt = self.spec.apply_view(rpt)
+        self.assertTrue(all([a.name is not None for a in rpt.attributes]))
+        self.assertTrue(all([t.title is not None for t in rpt.tables]))
+        self.assertTrue(all([c.header is not None for c in rpt.tables[0].columns]))
+        self.assertTrue(all([pg.title is not None for pg in rpt.plotGroups]))
+        self.assertTrue(all([p.title is not None for p in rpt.plotGroups[0].plots]))
diff --git a/tests/test_models_report_attribute.py b/tests/test_models_report_attribute.py
index bb5cd0c..2c8ae64 100644
--- a/tests/test_models_report_attribute.py
+++ b/tests/test_models_report_attribute.py
@@ -15,13 +15,6 @@ class TestAttribute(unittest.TestCase):
 
         self.assertRaises(PbReportError, _test)
 
-    def test_attribute_null_value(self):
-        """Can't create an attribute without a value."""
-        def _test():
-            a = Attribute('bob', None)
-
-        self.assertRaises(PbReportError, _test)
-
     def test_attribute_int_id(self):
         """Test exception of handling Attribute with int ids"""
         def _test():
diff --git a/tests/test_parsers.py b/tests/test_parsers.py
index 45bb8a1..4221e44 100644
--- a/tests/test_parsers.py
+++ b/tests/test_parsers.py
@@ -79,16 +79,41 @@ class TestParsers(unittest.TestCase):
         p.add_boolean("pbcommand.task_options.loud", "loud", default=False,
                       name="Verbose", description="Boolean option")
 
+        p.add_choice_str("pbcommand.task_options.ploidy", "ploidy",
+                         choices=["haploid","diploid"], name="Ploidy",
+                         description="Choice Option", default="haploid")
+        p.add_choice_int("pbcommand.task_options.delta", "delta",
+                         choices=[1,2,3], name="Delta",
+                         description="Int Choice Option", default=1)
+        p.add_choice_float("pbcommand.task_options.epsilon", "epsilon",
+                           choices=[0.01,0.1,1.0], name="Epsilon",
+                           description="Float Choice Option", default=0.1)
+
         pa = p.arg_parser.parser.parse_args
 
         opts = pa(["--n", "250", "--f", "1.2345", "--loud"])
         self.assertEqual(opts.n, 250)
         self.assertEqual(opts.f, 1.2345)
         self.assertTrue(opts.loud)
+        self.assertEqual(opts.ploidy, "haploid")
+        self.assertEqual(opts.delta, 1)
+        self.assertEqual(opts.epsilon, 0.1)
 
         opts2 = pa([])
         self.assertFalse(opts2.loud)
 
+        p.add_input_file_type(FileTypes.JSON,
+            "json",
+            "JSON file",
+            "JSON file description")
+        p.add_output_file_type(
+            file_type=FileTypes.GFF,
+            file_id="gff",
+            name="GFF file",
+            description="GFF file description",
+            default_name="annotations")
+        tc = p.to_contract()
+
     def test_catch_output_file_extension(self):
         p = get_pbparser(
             "pbcommand.tasks.test_parsers",
diff --git a/tests/test_pb_io_conditions.py b/tests/test_pb_io_conditions.py
new file mode 100644
index 0000000..43801a5
--- /dev/null
+++ b/tests/test_pb_io_conditions.py
@@ -0,0 +1,48 @@
+import unittest
+import logging
+import os
+
+from base_utils import get_data_file_from_subdir
+
+from pbcommand.pb_io import load_reseq_conditions_from
+
+
+log = logging.getLogger(__name__)
+
+
+_SERIALIZED_JSON_DIR = 'example-conditions'
+
+
+def _loader(name):
+    file_name = get_data_file_from_subdir(_SERIALIZED_JSON_DIR, name)
+    log.info("loading reseq conditions from {f}".format(f=file_name))
+    r = load_reseq_conditions_from(file_name)
+    return r
+
+
+class TestSerializationOfResequencingConditions(unittest.TestCase):
+
+    FILE_NAME = 'reseq-conditions-01.json'
+
+    @classmethod
+    def setUpClass(cls):
+        cls.cs = _loader(cls.FILE_NAME)
+
+    def test_condition_n(self):
+        self.assertEqual(len(self.cs.conditions), 3)
+
+    def test_condition_a(self):
+        log.info(self.cs)
+        self.assertEqual(self.cs.conditions[0].cond_id, "cond_alpha")
+
+    def test_condition_paths_abs(self):
+        for c in self.cs.conditions:
+            self.assertTrue(os.path.isabs(c.subreadset))
+            self.assertTrue(os.path.isabs(c.alignmentset))
+            self.assertTrue(os.path.isabs(c.referenceset))
+
+
+class TestSerializationOfResequencingConditionsWithRelativePath(TestSerializationOfResequencingConditions):
+
+    FILE_NAME = 'reseq-conditions-02.json'
+
diff --git a/tests/test_pb_io_report.py b/tests/test_pb_io_report.py
index be37fa7..a8b0231 100644
--- a/tests/test_pb_io_report.py
+++ b/tests/test_pb_io_report.py
@@ -30,6 +30,9 @@ class TestSerializationOverviewReport(unittest.TestCase):
     def test_id(self):
         self.assertEqual(self.report.id, "overview")
 
+    def test_uuid(self):
+        self.assertEqual(self.report.uuid, "196136c8-f6fd-11e5-b481-3c15c2cc8f88")
+
     def test_title(self):
         self.assertEqual(self.report.title, "Overview Report")
 
diff --git a/tests/test_pb_io_tool_contract.py b/tests/test_pb_io_tool_contract.py
index 7c50590..310537b 100644
--- a/tests/test_pb_io_tool_contract.py
+++ b/tests/test_pb_io_tool_contract.py
@@ -1,8 +1,8 @@
-import os
 import unittest
 import logging
 
-from base_utils import get_data_file, HAS_PBCORE, pbcore_skip_msg, get_temp_file, get_temp_dir
+from base_utils import get_temp_file, get_temp_dir
+from base_utils import get_tool_contract, get_resolved_tool_contract
 
 from pbcommand.models import (ToolContract,
                               ResolvedToolContract,
@@ -21,7 +21,7 @@ class TestLoadToolContract(unittest.TestCase):
 
     def test_01(self):
         file_name = "dev_example_tool_contract.json"
-        path = get_data_file(file_name)
+        path = get_tool_contract(file_name)
         tc = load_tool_contract_from(path)
         self.assertIsInstance(tc, ToolContract)
 
@@ -30,7 +30,7 @@ class TestMalformedToolContract(unittest.TestCase):
 
     def test_tc_no_inputs(self):
         file_name = "dev_example_tool_contract.json"
-        path = get_data_file(file_name)
+        path = get_tool_contract(file_name)
         tc = load_tool_contract_from(path)
         tc.task.input_file_types = []
 
@@ -43,7 +43,7 @@ class TestMalformedToolContract(unittest.TestCase):
 class TestWriteResolvedToolContractAvro(unittest.TestCase):
     def test_01(self):
         file_name = "resolved_tool_contract_dev_app.json"
-        rtc = load_resolved_tool_contract_from(get_data_file(file_name))
+        rtc = load_resolved_tool_contract_from(get_resolved_tool_contract(file_name))
         self.assertIsInstance(rtc, ResolvedToolContract)
 
         d = get_temp_dir("rtc-app")
diff --git a/tests/test_pb_io_tool_contract_v1.py b/tests/test_pb_io_tool_contract_v1.py
new file mode 100644
index 0000000..abdef07
--- /dev/null
+++ b/tests/test_pb_io_tool_contract_v1.py
@@ -0,0 +1,36 @@
+import unittest
+import logging
+
+from base_utils import get_tool_contract_v1
+
+from pbcommand.models import (ToolContract,
+                              MalformedToolContractError)
+
+from pbcommand.pb_io.tool_contract_io import (load_tool_contract_from, )
+
+
+log = logging.getLogger(__name__)
+
+
+class TestLoadToolContract(unittest.TestCase):
+
+    def test_01(self):
+        file_name = "dev_example_tool_contract.json"
+        path = get_tool_contract_v1(file_name)
+        tc = load_tool_contract_from(path)
+        self.assertIsInstance(tc, ToolContract)
+        self.assertEqual(tc.schema_version, "UNKNOWN")
+
+
+class TestMalformedToolContract(unittest.TestCase):
+
+    def test_tc_no_inputs(self):
+        file_name = "dev_example_tool_contract.json"
+        path = get_tool_contract_v1(file_name)
+        tc = load_tool_contract_from(path)
+        tc.task.input_file_types = []
+
+        def _run():
+            return tc.to_dict()
+
+        self.assertRaises(MalformedToolContractError, _run)
diff --git a/tests/test_resolver.py b/tests/test_resolver.py
index 90343b2..45a1ca9 100644
--- a/tests/test_resolver.py
+++ b/tests/test_resolver.py
@@ -1,11 +1,11 @@
 import logging
 import unittest
 
-from base_utils import get_data_file, get_temp_dir
-from pbcommand.models import ResolvedToolContract, ResolvedScatteredToolContractTask, ResolvedGatherToolContractTask
+from base_utils import (get_temp_dir, get_tool_contract, get_resolved_tool_contract)
+from pbcommand.models import ResolvedToolContract, ResolvedToolContractTask, ResolvedScatteredToolContractTask, ResolvedGatherToolContractTask
 
 from pbcommand.pb_io import load_tool_contract_from
-from pbcommand.resolver import resolve_scatter_tool_contract, resolve_gather_tool_contract
+from pbcommand.resolver import resolve_tool_contract, resolve_scatter_tool_contract, resolve_gather_tool_contract, ToolContractError
 
 log = logging.getLogger(__name__)
 
@@ -21,7 +21,7 @@ class TestScatterResolver(unittest.TestCase):
 
     def test_sanity(self):
         d = get_temp_dir("resolved-tool-contract")
-        tc = load_tool_contract_from(get_data_file(self.FILE_NAME))
+        tc = load_tool_contract_from(get_tool_contract(self.FILE_NAME))
         rtc = resolve_scatter_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, self.TOOL_OPTIONS, self.MAX_NCHUNKS, self.CHUNK_KEYS, False)
         self.assertIsInstance(rtc, ResolvedToolContract)
         self.assertIsInstance(rtc.task, ResolvedScatteredToolContractTask)
@@ -40,9 +40,68 @@ class TestGatherResolver(unittest.TestCase):
 
     def test_sanity(self):
         d = get_temp_dir("resolved-tool-contract")
-        tc = load_tool_contract_from(get_data_file(self.FILE_NAME))
+        tc = load_tool_contract_from(get_tool_contract(self.FILE_NAME))
         rtc = resolve_gather_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, self.TOOL_OPTIONS, self.CHUNK_KEY, False)
         self.assertIsInstance(rtc, ResolvedToolContract)
         self.assertIsInstance(rtc.task, ResolvedGatherToolContractTask)
         self.assertEqual(rtc.task.chunk_key, self.CHUNK_KEY)
         self.assertEqual(rtc.task.is_distributed, False)
+
+
+def _to_id(i):
+    return "pbcommand.task_options.{i}".format(i=i)
+
+
+class TestResolver(unittest.TestCase):
+    FILE_NAME = "dev_mixed_app_tool_contract.json"
+    MAX_NPROC = 1
+    INPUT_FILES = ['/tmp/file.csv']
+    PLOIDY = _to_id("ploidy")
+    ALPHA = _to_id("alpha")
+    BETA = _to_id("beta")
+    GAMMA = _to_id("gamma")
+    DELTA = _to_id("delta")
+    EPS = _to_id("epsilon")
+    COMMENTS = _to_id("comment")
+
+    def test_sanity(self):
+        d = get_temp_dir("resolved-tool-contract")
+        tc = load_tool_contract_from(get_tool_contract(self.FILE_NAME))
+        tool_options = {}
+        rtc = resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False)
+        self.assertIsInstance(rtc, ResolvedToolContract)
+        self.assertIsInstance(rtc.task, ResolvedToolContractTask)
+        self.assertEqual(rtc.task.is_distributed, False)
+        self.assertEqual(rtc.task.options[self.ALPHA], 25)
+        self.assertEqual(rtc.task.options[self.BETA], 1.234)
+        self.assertEqual(rtc.task.options[self.GAMMA], True)
+        self.assertEqual(rtc.task.options[self.PLOIDY], "haploid")
+        self.assertEqual(rtc.task.options[self.DELTA], 1)
+        self.assertEqual(rtc.task.options[self.EPS], 0.1)
+        self.assertEqual(rtc.task.options[self.COMMENTS], "asdf")
+        # non-defaults
+        tool_options = {self.ALPHA: 15, self.BETA: 2.5, self.GAMMA: False, self.PLOIDY: "diploid", self.DELTA: 2, self.EPS: 1.0, self.COMMENTS: "Hello, world!"}
+        rtc = resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False)
+        self.assertEqual(rtc.task.options[self.ALPHA], 15)
+        self.assertEqual(rtc.task.options[self.BETA], 2.5)
+        self.assertEqual(rtc.task.options[self.GAMMA], False)
+        self.assertEqual(rtc.task.options[self.PLOIDY], "diploid")
+        self.assertEqual(rtc.task.options[self.DELTA], 2)
+        self.assertEqual(rtc.task.options[self.EPS], 1.0)
+        self.assertEqual(rtc.task.options[self.COMMENTS], "Hello, world!")
+
+    def test_failure_modes(self):
+        d = get_temp_dir("resolved-tool-contract")
+        tc = load_tool_contract_from(get_tool_contract(self.FILE_NAME))
+        tool_options = {self.PLOIDY: "other"}
+        self.assertRaises(ToolContractError, lambda: resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False))
+        tool_options = {self.ALPHA:2.5}
+        self.assertRaises(ToolContractError, lambda: resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False))
+        tool_options = {self.ALPHA:"abcdef"}
+        self.assertRaises(ToolContractError, lambda: resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False))
+        tool_options = {self.BETA:"asdf"}
+        self.assertRaises(ToolContractError, lambda: resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False))
+        tool_options = {self.GAMMA:1.0}
+        self.assertRaises(ToolContractError, lambda: resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False))
+        tool_options = {self.GAMMA:""}
+        self.assertRaises(ToolContractError, lambda: resolve_tool_contract(tc, self.INPUT_FILES, d, d, self.MAX_NPROC, tool_options, False))
diff --git a/tests/test_schema_validation.py b/tests/test_schema_validation.py
index e24ffa1..94b2348 100644
--- a/tests/test_schema_validation.py
+++ b/tests/test_schema_validation.py
@@ -2,13 +2,23 @@ import json
 import os
 import logging
 import unittest
-from pbcommand.models import ToolContract, ResolvedToolContract
+
+from pbcommand.models import (ToolContract, ResolvedToolContract,
+                              PipelinePreset, PipelineDataStoreViewRules)
+from pbcommand.models.report import Report, ReportSpec
 
 from pbcommand.pb_io import (load_tool_contract_from,
-                             load_resolved_tool_contract_from)
-from pbcommand.schemas import validate_rtc, validate_tc
+                             load_resolved_tool_contract_from,
+                             load_pipeline_presets_from,
+                             load_pipeline_datastore_view_rules_from_json,
+                             load_report_spec_from_json)
+from pbcommand.schemas import (validate_rtc, validate_tc, validate_presets,
+                               validate_datastore_view_rules,
+                               validate_report_spec)
+from pbcommand.utils import walker
+
+from base_utils import DATA_DIR_RTC, DATA_DIR_TC, DATA_DIR_PRESETS, DATA_DIR_DSVIEW, DATA_DIR_REPORT_SPECS
 
-from base_utils import DATA_DIR
 
 log = logging.getLogger(__name__)
 
@@ -19,24 +29,15 @@ def _to_json(path):
     return d
 
 
-def _filter_rtc(path):
-    return path.endswith('resolved_tool_contract.json')
-
-
-def _filter_tc(path):
-    return path.endswith('tool_contract.json') and not path.endswith('resolved_tool_contract.json')
-
-
-def _get_all_from(root_dir, filter_func):
-    for path in os.listdir(root_dir):
-        if filter_func(path):
-            yield os.path.join(root_dir, path)
+def json_filter(path):
+    return path.endswith(".json")
 
 
 def _to_assertion(path, schema_validate_func):
     def test_is_validate(self):
         d = _to_json(path)
-        log.debug(d)
+        # log.debug(d)
+        log.info("Attempting to validate '{}'".format(path))
         is_valid = schema_validate_func(d)
         log.info(" is-valid? {i} {p}".format(i=is_valid, p=path))
         self.assertTrue(is_valid, "{p} is not valid with the avro schema".format(p=path))
@@ -45,7 +46,7 @@ def _to_assertion(path, schema_validate_func):
 
 class ValidateResolvedToolContracts(unittest.TestCase):
     def test_validate_resolved_tool_contracts(self):
-        for path in _get_all_from(DATA_DIR, _filter_rtc):
+        for path in walker(DATA_DIR_RTC, json_filter):
             f = _to_assertion(path, validate_rtc)
             f(self)
             self.assertIsInstance(load_resolved_tool_contract_from(path), ResolvedToolContract)
@@ -53,7 +54,35 @@ class ValidateResolvedToolContracts(unittest.TestCase):
 
 class ValidateToolContracts(unittest.TestCase):
     def test_validate_tool_contracts(self):
-        for path in _get_all_from(DATA_DIR, _filter_tc):
+        for path in walker(DATA_DIR_TC, json_filter):
             f = _to_assertion(path, validate_tc)
             f(self)
-            self.assertIsInstance(load_tool_contract_from(path), ToolContract)
\ No newline at end of file
+            self.assertIsInstance(load_tool_contract_from(path), ToolContract)
+
+
+class ValidatePipelinePreset(unittest.TestCase):
+    def test_validate_pipeline_presets(self):
+        for path in walker(DATA_DIR_PRESETS, json_filter):
+            f = _to_assertion(path, validate_presets)
+            f(self)
+            self.assertIsInstance(load_pipeline_presets_from(path), PipelinePreset)
+
+
+class ValidateDataStoreViewRules(unittest.TestCase):
+    def test_validate_pipeline_datastore_view_rules(self):
+        for path in walker(DATA_DIR_DSVIEW, json_filter):
+            f = _to_assertion(path, validate_datastore_view_rules)
+            f(self)
+            self.assertIsInstance(
+                load_pipeline_datastore_view_rules_from_json(path),
+                PipelineDataStoreViewRules)
+
+
+class ValidateReportSpec(unittest.TestCase):
+    def test_validate_report_spec(self):
+        for path in walker(DATA_DIR_REPORT_SPECS, json_filter):
+            if os.path.basename(path).startswith("report-specs"):
+                f = _to_assertion(path, validate_report_spec)
+                f(self)
+                self.assertIsInstance(load_report_spec_from_json(path),
+                                      ReportSpec)
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 856a581..8a292e9 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,9 +1,11 @@
 import functools
+import tempfile
 import unittest
 import argparse
 import logging
 
-from pbcommand.utils import Singleton, compose, get_parsed_args_log_level
+from pbcommand.utils import (Singleton, compose, get_parsed_args_log_level,
+    get_dataset_metadata)
 
 
 class TestSingleton(unittest.TestCase):
@@ -91,3 +93,22 @@ class TestLogging(unittest.TestCase):
         p = _get_argparser(logging.NOTSET).parse_args([])
         l = get_parsed_args_log_level(p)
         self.assertEqual(l, logging.NOTSET)
+
+
+class TestUtils(unittest.TestCase):
+
+    def test_get_dataset_metadata(self):
+        try:
+            import pbtestdata
+        except ImportError:
+            raise unittest.SkipTest("pbtestdata not available, skipping")
+        else:
+            md = get_dataset_metadata(pbtestdata.get_file("subreads-xml"))
+            self.assertEqual(md.metatype, "PacBio.DataSet.SubreadSet")
+            try:
+                from pbcore.io import SubreadSet
+            except ImportError:
+                raise unittest.SkipTest("pbcore not available, skipping")
+            else:
+                ds = SubreadSet(pbtestdata.get_file("subreads-xml"))
+                self.assertEqual(md.uuid, ds.uuid)
diff --git a/tox.ini b/tox.ini
index 8cc4324..62adb0f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,5 +10,5 @@ envlist = py27
 [testenv]
 commands = nosetests -s --verbose --logging-config log_nose.cfg
 deps =
-    numpy
+    requests
     nose 

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-pbcommand.git



More information about the debian-med-commit mailing list